From 6a7e4d87c7d725371b538f7a8453461210ce93fd Mon Sep 17 00:00:00 2001 From: Jineshdarjee Date: Mon, 30 Dec 2024 15:43:06 +0530 Subject: [PATCH 1/3] Modified the test cases --- package.json | 76 +- test/specs/mainnet/connext/arbitrum.spec.js | 1261 ----- test/specs/mainnet/connext/matic.spec.js | 1261 ----- .../mainnet/connext/newWallet_connext.spec.js | 1360 +++++ .../mainnet/connext/newWallet_xdai.spec.js | 1229 ----- .../mainnet/connext/oldWallet_connext.spec.js | 1429 +++++ test/specs/mainnet/connext/optimism.spec.js | 1261 ----- test/specs/mainnet/connext/xdai.spec.js | 1254 ----- .../mainnet/listAndRates/arbitrum.spec.js | 820 --- test/specs/mainnet/listAndRates/matic.spec.js | 820 --- .../newWallet_listAndRates.spec.js | 849 +++ .../listAndRates/newWallet_xdai.spec.js | 797 --- .../oldWallet_listAndRates.spec.js | 918 ++++ .../mainnet/listAndRates/optimism.spec.js | 820 --- test/specs/mainnet/listAndRates/xdai.spec.js | 822 --- test/specs/mainnet/paymaster/arbitrum.spec.js | 4240 --------------- test/specs/mainnet/paymaster/matic.spec.js | 4240 --------------- .../paymaster/newWallet_paymaster.spec.js | 4564 ++++++++++++++++ .../mainnet/paymaster/newWallet_xdai.spec.js | 4267 --------------- .../paymaster/oldWallet_paymaster.spec.js | 4596 +++++++++++++++++ test/specs/mainnet/paymaster/optimism.spec.js | 4240 --------------- test/specs/mainnet/paymaster/xdai.spec.js | 4309 ---------------- .../postcondition_newWallet.spec.js | 468 +- .../precondition_newWallet.spec.js | 733 +-- test/specs/mainnet/swap/arbitrum.spec.js | 2206 -------- test/specs/mainnet/swap/matic.spec.js | 2206 -------- .../specs/mainnet/swap/newWallet_swap.spec.js | 2380 +++++++++ .../specs/mainnet/swap/newWallet_xdai.spec.js | 2213 -------- .../specs/mainnet/swap/oldWallet_swap.spec.js | 2412 +++++++++ test/specs/mainnet/swap/optimism.spec.js | 2206 -------- test/specs/mainnet/swap/xdai.spec.js | 2237 -------- .../transactionHistory/arbitrum.spec.js | 1806 ------- .../mainnet/transactionHistory/matic.spec.js | 1806 ------- .../newWallet_transactionHistory.spec.js | 1766 +++++++ .../transactionHistory/newWallet_xdai.spec.js | 1717 ------ .../oldWallet_transactionHistory.spec.js | 1798 +++++++ .../transactionHistory/optimism.spec.js | 1806 ------- .../mainnet/transactionHistory/xdai.spec.js | 1743 ------- .../transferringFunds/arbitrum.spec.js | 3687 ------------- .../mainnet/transferringFunds/matic.spec.js | 3687 ------------- .../newWallet_transferringFunds.spec.js | 3856 ++++++++++++++ .../transferringFunds/newWallet_xdai.spec.js | 3583 ------------- .../oldWallet_transferringFunds.spec.js | 3888 ++++++++++++++ .../transferringFunds/optimism.spec.js | 3687 ------------- .../mainnet/transferringFunds/xdai.spec.js | 3609 ------------- .../testnet/connext/newWallet_connext.spec.js | 1360 +++++ .../testnet/connext/oldWallet_connext.spec.js | 1429 +++++ test/specs/testnet/connext/sepolia.spec.js | 1276 ----- .../newWallet_listAndRates.spec.js | 849 +++ .../oldWallet_listAndRates.spec.js | 918 ++++ .../testnet/listAndRates/sepolia.spec.js | 849 --- test/specs/testnet/paymaster/amoy.spec.js | 4241 --------------- .../paymaster/newWallet_paymaster.spec.js | 4564 ++++++++++++++++ .../paymaster/oldWallet_paymaster.spec.js | 4596 +++++++++++++++++ test/specs/testnet/paymaster/sepolia.spec.js | 4242 --------------- .../postcondition_newWallet.spec.js | 290 ++ .../precondition_newWallet.spec.js | 421 ++ .../specs/testnet/swap/newWallet_swap.spec.js | 2380 +++++++++ .../specs/testnet/swap/oldWallet_swap.spec.js | 2412 
+++++++++ .../newWallet_transactionHistory.spec.js | 1766 +++++++ .../oldWallet_transactionHistory.spec.js | 1798 +++++++ .../transactionHistory/sepolia.spec.js | 1793 ------- .../newWallet_transferringFunds.spec.js | 3856 ++++++++++++++ .../oldWallet_transferringFunds.spec.js | 3888 ++++++++++++++ .../testnet/transferringFunds/sepolia.spec.js | 3680 ------------- test/utils/sharedData_mainnet.js | 165 + test/utils/sharedData_testnet.js | 144 + 67 files changed, 61301 insertions(+), 86549 deletions(-) delete mode 100644 test/specs/mainnet/connext/arbitrum.spec.js delete mode 100644 test/specs/mainnet/connext/matic.spec.js create mode 100644 test/specs/mainnet/connext/newWallet_connext.spec.js delete mode 100644 test/specs/mainnet/connext/newWallet_xdai.spec.js create mode 100644 test/specs/mainnet/connext/oldWallet_connext.spec.js delete mode 100644 test/specs/mainnet/connext/optimism.spec.js delete mode 100644 test/specs/mainnet/connext/xdai.spec.js delete mode 100644 test/specs/mainnet/listAndRates/arbitrum.spec.js delete mode 100644 test/specs/mainnet/listAndRates/matic.spec.js create mode 100644 test/specs/mainnet/listAndRates/newWallet_listAndRates.spec.js delete mode 100644 test/specs/mainnet/listAndRates/newWallet_xdai.spec.js create mode 100644 test/specs/mainnet/listAndRates/oldWallet_listAndRates.spec.js delete mode 100644 test/specs/mainnet/listAndRates/optimism.spec.js delete mode 100644 test/specs/mainnet/listAndRates/xdai.spec.js delete mode 100644 test/specs/mainnet/paymaster/arbitrum.spec.js delete mode 100644 test/specs/mainnet/paymaster/matic.spec.js create mode 100644 test/specs/mainnet/paymaster/newWallet_paymaster.spec.js delete mode 100644 test/specs/mainnet/paymaster/newWallet_xdai.spec.js create mode 100644 test/specs/mainnet/paymaster/oldWallet_paymaster.spec.js delete mode 100644 test/specs/mainnet/paymaster/optimism.spec.js delete mode 100644 test/specs/mainnet/paymaster/xdai.spec.js delete mode 100644 test/specs/mainnet/swap/arbitrum.spec.js delete mode 100644 test/specs/mainnet/swap/matic.spec.js create mode 100644 test/specs/mainnet/swap/newWallet_swap.spec.js delete mode 100644 test/specs/mainnet/swap/newWallet_xdai.spec.js create mode 100644 test/specs/mainnet/swap/oldWallet_swap.spec.js delete mode 100644 test/specs/mainnet/swap/optimism.spec.js delete mode 100644 test/specs/mainnet/swap/xdai.spec.js delete mode 100644 test/specs/mainnet/transactionHistory/arbitrum.spec.js delete mode 100644 test/specs/mainnet/transactionHistory/matic.spec.js create mode 100644 test/specs/mainnet/transactionHistory/newWallet_transactionHistory.spec.js delete mode 100644 test/specs/mainnet/transactionHistory/newWallet_xdai.spec.js create mode 100644 test/specs/mainnet/transactionHistory/oldWallet_transactionHistory.spec.js delete mode 100644 test/specs/mainnet/transactionHistory/optimism.spec.js delete mode 100644 test/specs/mainnet/transactionHistory/xdai.spec.js delete mode 100644 test/specs/mainnet/transferringFunds/arbitrum.spec.js delete mode 100644 test/specs/mainnet/transferringFunds/matic.spec.js create mode 100644 test/specs/mainnet/transferringFunds/newWallet_transferringFunds.spec.js delete mode 100644 test/specs/mainnet/transferringFunds/newWallet_xdai.spec.js create mode 100644 test/specs/mainnet/transferringFunds/oldWallet_transferringFunds.spec.js delete mode 100644 test/specs/mainnet/transferringFunds/optimism.spec.js delete mode 100644 test/specs/mainnet/transferringFunds/xdai.spec.js create mode 100644 test/specs/testnet/connext/newWallet_connext.spec.js 
create mode 100644 test/specs/testnet/connext/oldWallet_connext.spec.js delete mode 100644 test/specs/testnet/connext/sepolia.spec.js create mode 100644 test/specs/testnet/listAndRates/newWallet_listAndRates.spec.js create mode 100644 test/specs/testnet/listAndRates/oldWallet_listAndRates.spec.js delete mode 100644 test/specs/testnet/listAndRates/sepolia.spec.js delete mode 100644 test/specs/testnet/paymaster/amoy.spec.js create mode 100644 test/specs/testnet/paymaster/newWallet_paymaster.spec.js create mode 100644 test/specs/testnet/paymaster/oldWallet_paymaster.spec.js delete mode 100644 test/specs/testnet/paymaster/sepolia.spec.js create mode 100644 test/specs/testnet/postcondition/postcondition_newWallet.spec.js create mode 100644 test/specs/testnet/precondition/precondition_newWallet.spec.js create mode 100644 test/specs/testnet/swap/newWallet_swap.spec.js create mode 100644 test/specs/testnet/swap/oldWallet_swap.spec.js create mode 100644 test/specs/testnet/transactionHistory/newWallet_transactionHistory.spec.js create mode 100644 test/specs/testnet/transactionHistory/oldWallet_transactionHistory.spec.js delete mode 100644 test/specs/testnet/transactionHistory/sepolia.spec.js create mode 100644 test/specs/testnet/transferringFunds/newWallet_transferringFunds.spec.js create mode 100644 test/specs/testnet/transferringFunds/oldWallet_transferringFunds.spec.js delete mode 100644 test/specs/testnet/transferringFunds/sepolia.spec.js create mode 100644 test/utils/sharedData_mainnet.js create mode 100644 test/utils/sharedData_testnet.js diff --git a/package.json b/package.json index 591719a..1dc1811 100644 --- a/package.json +++ b/package.json @@ -6,62 +6,30 @@ "type": "module", "scripts": { "test": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transferringFunds/sepolia.spec.js --reporter mochawesome --require mochawesome/register --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/*/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/*/oldWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-mainnet-precondition": "mocha --timeout 600000 --spec test/specs/mainnet/precondition/*_newWallet.spec.js", "test-mainnet-postcondition": "mocha --timeout 600000 --spec test/specs/mainnet/postcondition/*_newWallet.spec.js", - "test-mainnet-newWallet": "npm run test-mainnet-precondition; mocha --timeout 600000 --spec test/specs/mainnet/*/*xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-mainnet-postcondition", - "test-mainnet-combined": "npm run test-mainnet-precondition; mocha --timeout 600000 --spec test/specs/mainnet/listAndRates/xdai_newWallet.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-mainnet-postcondition", - "test-mainnet-transfertoken-arbitrum": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transferringFunds/arbitrum.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transfertoken-matic": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec 
test/specs/mainnet/transferringFunds/matic.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transfertoken-optimism": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transferringFunds/optimism.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transfertoken-xdai": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transferringFunds/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transfertoken": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transferringFunds/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transactionhistory-arbitrum": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transactionHistory/arbitrum.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transactionhistory-matic": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transactionHistory/matic.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transactionhistory-optimism": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transactionHistory/optimism.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transactionhistory-xdai": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transactionHistory/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transactionhistory": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transactionHistory/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-swap-arbitrum": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/swap/arbitrum.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-swap-matic": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/swap/matic.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-swap-optimism": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/swap/optimism.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-swap-xdai": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/swap/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-swap": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/swap/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-paymaster-arbitrum": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/paymaster/arbitrum.spec.js --reporter mochawesome 
--reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-paymaster-matic": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/paymaster/matic.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-paymaster-optimism": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/paymaster/optimism.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-paymaster-xdai": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/paymaster/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-paymaster": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/paymaster/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-listandrates-arbitrum": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/listAndRates/arbitrum.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-listandrates-matic": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/listAndRates/matic.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-listandrates-optimism": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/listAndRates/optimism.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-listandrates-xdai": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/listAndRates/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-listandrates": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/listAndRates/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-getaddresses-arbitrum": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/getAddresses/arbitrum.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-getaddresses-matic": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/getAddresses/matic.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-getaddresses-optimism": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/getAddresses/optimism.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-getaddresses-xdai": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/getAddresses/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-getaddresses": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/getAddresses/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-connext-arbitrum": "mocha --timeout 
600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/connext/arbitrum.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-connext-matic": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/connext/matic.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-connext-optimism": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/connext/optimism.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-connext-xdai": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/connext/xdai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-connext": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/connext/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/*/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-transfertoken-sepolia": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transferringFunds/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-transfertoken": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transferringFunds/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-transactionhistory-sepolia": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transactionHistory/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-transactionhistory": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transactionHistory/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-swap-sepolia": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/swap/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-swap": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/swap/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-paymaster-sepolia": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/paymaster/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-paymaster-amoy": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/paymaster/amoy.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-paymaster": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/paymaster/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - 
"test-testnet-listandrates-sepolia": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/listAndRates/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-listandrates": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/listAndRates/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-getaddresses-sepolia": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/getAddresses/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-getaddresses": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/getAddresses/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-connext-sepolia": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/connext/sepolia.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-connext": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/connext/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-newWallet": "npm run test-mainnet-precondition; mocha --timeout 600000 --spec test/specs/mainnet/*/newWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-mainnet-postcondition", + "test-mainnet-combined": "npm run test-mainnet-precondition; mocha --timeout 600000 --spec test/specs/mainnet/*/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-mainnet-postcondition", + "test-mainnet-transfertoken": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transferringFunds/*transferringFunds.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-transactionhistory": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transactionHistory/*transactionHistory.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-swap": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/swap/*swap.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-paymaster": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/paymaster/*paymaster.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-listandrates": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/listAndRates/*listAndRates.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-getaddresses": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/getAddresses/*getAddresses.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-connext": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec 
test/specs/mainnet/connext/*connext.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/*/oldWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet-precondition": "mocha --timeout 600000 --spec test/specs/testnet/precondition/*_newWallet.spec.js", + "test-testnet-postcondition": "mocha --timeout 600000 --spec test/specs/testnet/postcondition/*_newWallet.spec.js", + "test-testnet-newWallet": "npm run test-testnet-precondition; mocha --timeout 600000 --spec test/specs/testnet/*/newWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-testnet-postcondition", + "test-testnet-combined": "npm run test-testnet-precondition; mocha --timeout 600000 --spec test/specs/testnet/*/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-testnet-postcondition", + "test-testnet-transfertoken": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transferringFunds/*transferringFunds.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet-transactionhistory": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transactionHistory/*transactionHistory.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet-swap": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/swap/*swap.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet-paymaster": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/paymaster/*paymaster.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet-listandrates": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/listAndRates/*listAndRates.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet-getaddresses": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/getAddresses/*getAddresses.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-testnet-connext": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/connext/*connext.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-arka-mumbai": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/arka/mumbai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-arka-goerli": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/arka/goerli.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-arka": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/arka/mumbai.spec.js --spec test/specs/loadAndPerformance/arka/goerli.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", diff --git a/test/specs/mainnet/connext/arbitrum.spec.js b/test/specs/mainnet/connext/arbitrum.spec.js deleted file mode 100644 index
64ed535..0000000 --- a/test/specs/mainnet/connext/arbitrum.spec.js +++ /dev/null @@ -1,1261 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber, ethers } from 'ethers'; -import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let arbitrumMainNetSdk; -let arbitrumEtherspotWalletAddress; -let arbitrumNativeAddress = null; -let arbitrumDataService; -let runTest; - -describe('The PrimeSDK, Validate the connext endpoints with arbitrum network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - arbitrumMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.arbitrum_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.arbitrum_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - arbitrumMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - arbitrumEtherspotWalletAddress = - await arbitrumMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - arbitrumEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - arbitrumDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await arbitrumDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === arbitrumNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_arbitrumUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the all supported assets with valid details on the arbitrum network', async function () { - var test = this; - let allSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - allSupportedAssets = await arbitrumDataService.getSupportedAssets({}); - - try { - assert.isNotEmpty( - allSupportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].chainId, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the supported assets with valid details on the arbitrum network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - supportedAssets = await arbitrumDataService.getSupportedAssets({ - chainId: data.arbitrum_chainid, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - supportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); 
- addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - supportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.strictEqual( - supportedAssets[0].chainId, - data.arbitrum_chainid, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get quotes with valid details on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - quotes[0].data, - message.vali_connext_getQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(quotes[0].to, message.vali_connext_getQuotes_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes[0].value, - message.vali_connext_getQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transaction status with valid details on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await arbitrumMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await arbitrumDataService.getTransactionStatus({ - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - transactionHash: userOpsReceipt.receipt.transactionHash, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - transactionStatus.status, - message.vali_connext_getTransactionStatus_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.transactionHash, - message.vali_connext_getTransactionStatus_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.connextscanUrl, - message.vali_connext_getTransactionStatus_connextscanUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the supported assets with invalid chainid on the arbitrum network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - try { - supportedAssets = await arbitrumDataService.getSupportedAssets({ - chainId: data.invalid_arbitrum_chainid, - provider: BridgingProvider.Connext, - }); - - if (supportedAssets.length === 0) { - addContext(test, message.vali_connext_1); - console.log(message.vali_connext_1); - } else { - addContext(test, message.fail_connext_5); - assert.fail(message.fail_connext_5); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with 
invalid sender address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.invalidSender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_7); - assert.fail(message.fail_connext_7); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_2); - console.log(message.vali_connext_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without sender address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_8); - assert.fail(message.fail_connext_8); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_3); - console.log(message.vali_connext_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect sender address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.incorrectSender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_9); - assert.fail(message.fail_connext_9); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_4); - console.log(message.vali_connext_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid recepient address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, 
- toAddress: data.invalidRecipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_10); - assert.fail(message.fail_connext_10); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_5); - console.log(message.vali_connext_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without recepient address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_11); - assert.fail(message.fail_connext_11); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_6); - console.log(message.vali_connext_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect recepient address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.incorrectRecipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_12); - assert.fail(message.fail_connext_12); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_7); - console.log(message.vali_connext_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without fromChainid details on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - 
addContext(test, message.fail_connext_13); - assert.fail(message.fail_connext_13); - } catch (e) { - if (e.message === constant.invalid_address_9) { - addContext(test, message.vali_connext_8); - console.log(message.vali_connext_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without toChainid details on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_14); - assert.fail(message.fail_connext_14); - } catch (e) { - if (e.message === constant.invalid_address_10) { - addContext(test, message.vali_connext_9); - console.log(message.vali_connext_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid from token address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.invalidTokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_15); - assert.fail(message.fail_connext_15); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_10); - console.log(message.vali_connext_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect from token address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.incorrectTokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_16); - assert.fail(message.fail_connext_16); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_11); - console.log(message.vali_connext_11); - } else { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without from token address on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_17); - assert.fail(message.fail_connext_17); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_12); - console.log(message.vali_connext_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid value on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.invalidValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_18); - assert.fail(message.fail_connext_18); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_connext_13); - console.log(message.vali_connext_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with small value on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.smallValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_19); - assert.fail(message.fail_connext_19); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_connext_14); - console.log(message.vali_connext_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without value on the arbitrum 
network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_20); - assert.fail(message.fail_connext_20); - } catch (e) { - if ( - e.errors[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_connext_15); - console.log(message.vali_connext_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_20); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without slippage on the arbitrum network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await arbitrumDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_21); - assert.fail(message.fail_connext_21); - } catch (e) { - if (e.message === constant.invalid_address_13) { - addContext(test, message.vali_connext_16); - console.log(message.vali_connext_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_21); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without fromChainId on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await arbitrumDataService.getTransactionStatus({ - toChainId: data.matic_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_22); - assert.fail(message.fail_connext_22); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_4 - ) { - addContext(test, message.vali_connext_17); - console.log(message.vali_connext_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_22); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without toChainId on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await arbitrumDataService.getTransactionStatus({ - fromChainId: data.invalid_arbitrum_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, 
message.fail_connext_23); - assert.fail(message.fail_connext_23); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_5 - ) { - addContext(test, message.vali_connext_18); - console.log(message.vali_connext_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_23); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with invalid transactionHash on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await arbitrumDataService.getTransactionStatus({ - fromChainId: data.invalid_arbitrum_chainid, - toChainId: data.matic_chainid, - transactionHash: data.invalid_transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_24); - assert.fail(message.fail_connext_24); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_19); - console.log(message.vali_connext_19); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_24); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with incorrect transactionHash on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await arbitrumDataService.getTransactionStatus({ - fromChainId: data.invalid_arbitrum_chainid, - toChainId: data.matic_chainid, - transactionHash: data.incorrect_transactionHash, - provider: BridgingProvider.Connext, - }); - - if (transactionStatus.status === constant.invalid_chainid_6) { - addContext(test, message.vali_connext_20); - console.log(message.vali_connext_20); - } else { - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without transactionHash on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await arbitrumDataService.getTransactionStatus({ - fromChainId: data.invalid_arbitrum_chainid, - toChainId: data.matic_chainid, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_26); - assert.fail(message.fail_connext_26); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_21); - console.log(message.vali_connext_21); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_26); - } - } - }, 
data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/connext/matic.spec.js b/test/specs/mainnet/connext/matic.spec.js deleted file mode 100644 index c919fcf..0000000 --- a/test/specs/mainnet/connext/matic.spec.js +++ /dev/null @@ -1,1261 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber, ethers } from 'ethers'; -import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let maticMainNetSdk; -let maticEtherspotWalletAddress; -let maticNativeAddress = null; -let maticDataService; -let runTest; - -describe('The PrimeSDK, Validate the connext endpoints with matic network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - maticMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.matic_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.matic_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - maticMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - maticEtherspotWalletAddress = - await maticMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - maticEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - maticDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await maticDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.matic_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === maticNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_maticUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the all supported assets with valid details on the matic network', async function () { - var test = this; - let allSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - allSupportedAssets = await maticDataService.getSupportedAssets({}); - - try { - assert.isNotEmpty( - allSupportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].chainId, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the supported assets with valid details on the matic network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - supportedAssets = await maticDataService.getSupportedAssets({ - chainId: data.matic_chainid, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - supportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); 
- } - - try { - assert.isNotEmpty( - supportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - supportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.strictEqual( - supportedAssets[0].chainId, - data.matic_chainid, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get quotes with valid details on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - quotes[0].data, - message.vali_connext_getQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(quotes[0].to, message.vali_connext_getQuotes_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes[0].value, - message.vali_connext_getQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transaction status with valid details on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await maticMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await maticDataService.getTransactionStatus({ - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - transactionHash: userOpsReceipt.receipt.transactionHash, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - transactionStatus.status, - message.vali_connext_getTransactionStatus_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.transactionHash, - message.vali_connext_getTransactionStatus_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.connextscanUrl, - message.vali_connext_getTransactionStatus_connextscanUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the supported assets with invalid chainid on the matic network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - try { - supportedAssets = await maticDataService.getSupportedAssets({ - chainId: data.invalid_matic_chainid, - provider: BridgingProvider.Connext, - }); - - if (supportedAssets.length === 0) { - addContext(test, message.vali_connext_1); - console.log(message.vali_connext_1); - } else { - addContext(test, message.fail_connext_5); - assert.fail(message.fail_connext_5); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid sender address on the matic network', async function () { - var test = 
this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.invalidSender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_7); - assert.fail(message.fail_connext_7); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_2); - console.log(message.vali_connext_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without sender address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_8); - assert.fail(message.fail_connext_8); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_3); - console.log(message.vali_connext_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect sender address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.incorrectSender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_9); - assert.fail(message.fail_connext_9); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_4); - console.log(message.vali_connext_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid recepient address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.invalidRecipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: 
data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_10); - assert.fail(message.fail_connext_10); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_5); - console.log(message.vali_connext_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without recepient address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_11); - assert.fail(message.fail_connext_11); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_6); - console.log(message.vali_connext_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect recepient address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.incorrectRecipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_12); - assert.fail(message.fail_connext_12); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_7); - console.log(message.vali_connext_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without fromChainid details on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_13); - assert.fail(message.fail_connext_13); - } catch (e) { - if (e.message === constant.invalid_address_9) { - addContext(test, 
message.vali_connext_8); - console.log(message.vali_connext_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without toChainid details on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_14); - assert.fail(message.fail_connext_14); - } catch (e) { - if (e.message === constant.invalid_address_10) { - addContext(test, message.vali_connext_9); - console.log(message.vali_connext_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid from token address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.invalidTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_15); - assert.fail(message.fail_connext_15); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_10); - console.log(message.vali_connext_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect from token address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.incorrectTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_16); - assert.fail(message.fail_connext_16); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_11); - console.log(message.vali_connext_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without from token address on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_17); - assert.fail(message.fail_connext_17); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_12); - console.log(message.vali_connext_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid value on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.invalidValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_18); - assert.fail(message.fail_connext_18); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_connext_13); - console.log(message.vali_connext_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with small value on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.smallValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_19); - assert.fail(message.fail_connext_19); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_connext_14); - console.log(message.vali_connext_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without value on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, 
- fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_20); - assert.fail(message.fail_connext_20); - } catch (e) { - if ( - e.errors[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_connext_15); - console.log(message.vali_connext_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_20); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without slippage on the matic network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await maticDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_21); - assert.fail(message.fail_connext_21); - } catch (e) { - if (e.message === constant.invalid_address_13) { - addContext(test, message.vali_connext_16); - console.log(message.vali_connext_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_21); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without fromChainId on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await maticDataService.getTransactionStatus({ - toChainId: data.matic_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_22); - assert.fail(message.fail_connext_22); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_4 - ) { - addContext(test, message.vali_connext_17); - console.log(message.vali_connext_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_22); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without toChainId on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await maticDataService.getTransactionStatus({ - fromChainId: data.invalid_matic_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_23); - assert.fail(message.fail_connext_23); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_5 - ) { - addContext(test, message.vali_connext_18); - console.log(message.vali_connext_18); - } else { - console.error(e); - 
const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_23); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with invalid transactionHash on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await maticDataService.getTransactionStatus({ - fromChainId: data.invalid_matic_chainid, - toChainId: data.matic_chainid, - transactionHash: data.invalid_transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_24); - assert.fail(message.fail_connext_24); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_19); - console.log(message.vali_connext_19); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_24); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with incorrect transactionHash on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await maticDataService.getTransactionStatus({ - fromChainId: data.invalid_matic_chainid, - toChainId: data.matic_chainid, - transactionHash: data.incorrect_transactionHash, - provider: BridgingProvider.Connext, - }); - - if (transactionStatus.status === constant.invalid_chainid_6) { - addContext(test, message.vali_connext_20); - console.log(message.vali_connext_20); - } else { - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without transactionHash on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await maticDataService.getTransactionStatus({ - fromChainId: data.invalid_matic_chainid, - toChainId: data.matic_chainid, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_26); - assert.fail(message.fail_connext_26); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_21); - console.log(message.vali_connext_21); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_26); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/connext/newWallet_connext.spec.js b/test/specs/mainnet/connext/newWallet_connext.spec.js new file mode 100644 index 0000000..d006e62 
--- /dev/null +++ b/test/specs/mainnet/connext/newWallet_connext.spec.js @@ -0,0 +1,1360 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, ethers } from 'ethers'; +import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToChainId, + randomTokenAddress, +} from '../../../utils/sharedData_mainnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let mainnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Validate the connext endpoints on the MainNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... 
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the all supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let allSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + allSupportedAssets = await dataService.getSupportedAssets({}); + + try { + assert.isNotEmpty( + allSupportedAssets[0].symbol, + message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + allSupportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].chainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + allSupportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let supportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + supportedAssets = await dataService.getSupportedAssets({ + chainId: randomChainId, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + supportedAssets[0].symbol,
message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + supportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.strictEqual( + supportedAssets[0].chainId, + randomChainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get quotes with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + quotes[0].data, + message.vali_connext_getQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].to, + message.vali_connext_getQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].value, + message.vali_connext_getQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transaction status with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomChainId, + toChainId: randomToChainId, + transactionHash: userOpsReceipt.receipt.transactionHash, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + transactionStatus.status, + message.vali_connext_getTransactionStatus_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.transactionHash, + message.vali_connext_getTransactionStatus_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.connextscanUrl, + message.vali_connext_getTransactionStatus_connextscanUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.invalidSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_7); + assert.fail(message.fail_connext_7); + } catch (e) { + if ( + 
e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_2); + console.log(message.vali_connext_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_8); + assert.fail(message.fail_connext_8); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_3); + console.log(message.vali_connext_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.incorrectSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_9); + assert.fail(message.fail_connext_9); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_4); + console.log(message.vali_connext_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.invalidRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_10); + assert.fail(message.fail_connext_10); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + 
addContext(test, message.vali_connext_5); + console.log(message.vali_connext_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_11); + assert.fail(message.fail_connext_11); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_6); + console.log(message.vali_connext_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.incorrectRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_12); + assert.fail(message.fail_connext_12); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_7); + console.log(message.vali_connext_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without fromChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_13); + assert.fail(message.fail_connext_13); + } catch (e) { + if (e.message === constant.invalid_address_9) { + addContext(test, message.vali_connext_8); + console.log(message.vali_connext_8); + } else { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_connext_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without toChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_14); + assert.fail(message.fail_connext_14); + } catch (e) { + if (e.message === constant.invalid_address_10) { + addContext(test, message.vali_connext_9); + console.log(message.vali_connext_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_14); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomInvalidTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_15); + assert.fail(message.fail_connext_15); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_10); + console.log(message.vali_connext_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_15); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomIncorrectTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_16); + assert.fail(message.fail_connext_16); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_11); + console.log(message.vali_connext_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_16); + } + } + }, data.retry); // 
Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_17); + assert.fail(message.fail_connext_17); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_12); + console.log(message.vali_connext_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.invalidValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_18); + assert.fail(message.fail_connext_18); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_connext_13); + console.log(message.vali_connext_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with small value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.smallValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_19); + assert.fail(message.fail_connext_19); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_connext_14); + console.log(message.vali_connext_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } 
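// Illustrative sketch of the amount handling exercised by the invalid-value and
// small-value cases above, assuming ethers v5 semantics: utils.parseUnits turns a
// decimal string into a BigNumber scaled by the given number of decimals, and a
// non-numeric string throws an error carrying a `reason` field, which is what the
// invalid-value case matches against.
import { utils } from 'ethers';

const amount = utils.parseUnits('0.5', 18); // BigNumber for 500000000000000000
console.log(amount.toString());

try {
  utils.parseUnits('abc', 18); // non-numeric input
} catch (e) {
  console.log(e.reason); // e.g. an "invalid decimal value" style reason
}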
+ ); + + it( + 'REGRESSION: Validate the get quotes without value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_20); + assert.fail(message.fail_connext_20); + } catch (e) { + if ( + e.errors[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + addContext(test, message.vali_connext_15); + console.log(message.vali_connext_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_20); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without slippage on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_21); + assert.fail(message.fail_connext_21); + } catch (e) { + if (e.message === constant.invalid_address_13) { + addContext(test, message.vali_connext_16); + console.log(message.vali_connext_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_21); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without fromChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + toChainId: randomToChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_22); + assert.fail(message.fail_connext_22); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_4 + ) { + addContext(test, message.vali_connext_17); + console.log(message.vali_connext_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_22); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without toChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the 
transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_23); + assert.fail(message.fail_connext_23); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_5 + ) { + addContext(test, message.vali_connext_18); + console.log(message.vali_connext_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_23); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with invalid transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.invalid_transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_24); + assert.fail(message.fail_connext_24); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_19); + console.log(message.vali_connext_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_24); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with incorrect transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.incorrect_transactionHash, + provider: BridgingProvider.Connext, + }); + + if (transactionStatus.status === constant.invalid_chainid_6) { + addContext(test, message.vali_connext_20); + console.log(message.vali_connext_20); + } else { + addContext(test, eString); + assert.fail(message.fail_connext_25); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_25); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + provider: 
BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_26); + assert.fail(message.fail_connext_26); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_21); + console.log(message.vali_connext_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_26); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/connext/newWallet_xdai.spec.js b/test/specs/mainnet/connext/newWallet_xdai.spec.js deleted file mode 100644 index 3bed2b8..0000000 --- a/test/specs/mainnet/connext/newWallet_xdai.spec.js +++ /dev/null @@ -1,1229 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, ethers } from 'ethers'; -import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import testUtils from '../../../utils/testUtils.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; -import { dirname } from 'path'; -import { fileURLToPath } from 'url'; -import fs from 'fs'; -import path from 'path'; - -let xdaiMainNetSdk; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; -const __dirname = dirname(fileURLToPath(import.meta.url)); - -describe('The PrimeSDK, Validate the connext endpoints with xdai network on the MainNet (with new wallet)', function () { - before(async function () { - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: sharedState.newPrivateKey }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // initializating Data service... 
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - }, data.retry); // Retry this async test up to 5 times - }); - - beforeEach(async function () { - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }); - - it('SMOKE: Validate the all supported assets with valid details on the xdai network', async function () { - var test = this; - let allSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - allSupportedAssets = await xdaiDataService.getSupportedAssets({}); - - try { - assert.isNotEmpty( - allSupportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].chainId, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the supported assets with valid details on the xdai network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - supportedAssets = await xdaiDataService.getSupportedAssets({ - chainId: data.xdai_chainid, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - supportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch 
(e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - supportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.strictEqual( - supportedAssets[0].chainId, - data.xdai_chainid, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get quotes with valid details on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - quotes[0].data, - message.vali_connext_getQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(quotes[0].to, message.vali_connext_getQuotes_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes[0].value, - message.vali_connext_getQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transaction status with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - transactionHash: userOpsReceipt.receipt.transactionHash, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - transactionStatus.status, - message.vali_connext_getTransactionStatus_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.transactionHash, - message.vali_connext_getTransactionStatus_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.connextscanUrl, - message.vali_connext_getTransactionStatus_connextscanUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid sender address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.invalidSender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_7); - assert.fail(message.fail_connext_7); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - 
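// Condensed sketch of the submit-and-poll flow used by the transaction-status
// smoke test above (happy path only; the sdk, data service, helper and data
// fixtures come from the surrounding spec).
await xdaiMainNetSdk.clearUserOpsFromBatch();
await xdaiMainNetSdk.addUserOpsToBatch({
  to: data.recipient,
  value: ethers.utils.parseEther(data.value),
});
const op = await xdaiMainNetSdk.estimate(); // fee data for the UserOp
const uoHash = await xdaiMainNetSdk.send(op); // signed UserOp handed to the bundler

let userOpsReceipt = null;
const deadline = Date.now() + 60000; // 1 minute timeout
while (userOpsReceipt == null && Date.now() < deadline) {
  await helper.wait(5000); // poll every 5 seconds
  userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash);
}

const transactionStatus = await xdaiDataService.getTransactionStatus({
  fromChainId: data.xdai_chainid,
  toChainId: data.matic_chainid,
  transactionHash: userOpsReceipt.receipt.transactionHash,
  provider: BridgingProvider.Connext,
});
console.log(transactionStatus.status, transactionStatus.connextscanUrl);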
addContext(test, message.vali_connext_2); - console.log(message.vali_connext_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without sender address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_8); - assert.fail(message.fail_connext_8); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_3); - console.log(message.vali_connext_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect sender address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.incorrectSender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_9); - assert.fail(message.fail_connext_9); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_4); - console.log(message.vali_connext_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid recepient address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.invalidRecipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_10); - assert.fail(message.fail_connext_10); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_5); - console.log(message.vali_connext_5); - } else { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without recepient address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_11); - assert.fail(message.fail_connext_11); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_6); - console.log(message.vali_connext_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect recepient address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.incorrectRecipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_12); - assert.fail(message.fail_connext_12); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_7); - console.log(message.vali_connext_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without fromChainid details on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_13); - assert.fail(message.fail_connext_13); - } catch (e) { - if (e.message === constant.invalid_address_9) { - addContext(test, message.vali_connext_8); - console.log(message.vali_connext_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without toChainid details on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_14); - assert.fail(message.fail_connext_14); - } catch (e) { - if (e.message === constant.invalid_address_10) { - addContext(test, message.vali_connext_9); - console.log(message.vali_connext_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid from token address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.invalidTokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_15); - assert.fail(message.fail_connext_15); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_10); - console.log(message.vali_connext_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect from token address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.incorrectTokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_16); - assert.fail(message.fail_connext_16); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_11); - console.log(message.vali_connext_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - 
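// Illustrative sketch of the insufficient-balance guard shared by these specs:
// the beforeEach hook formats the native balance with 18 decimals and the USDC
// balance with 6, compares both against the configured minimums, and every test
// skips itself when runTest ends up false.
const output = await xdaiDataService.getAccountBalances({
  account: data.sender,
  chainId: Number(data.xdai_chainid),
});
let native_final;
let usdc_final;
for (const item of output.items) {
  if (item.token === xdaiNativeAddress) {
    native_final = utils.formatUnits(item.balance, 18); // native token, 18 decimals
  } else if (item.token === data.tokenAddress_xdaiUSDC) {
    usdc_final = utils.formatUnits(item.balance, 6); // USDC uses 6 decimals
  }
}
runTest =
  native_final > data.minimum_native_balance &&
  usdc_final > data.minimum_token_balance;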
it('REGRESSION: Validate the get quotes without from token address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_17); - assert.fail(message.fail_connext_17); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_12); - console.log(message.vali_connext_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid value on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.invalidValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_18); - assert.fail(message.fail_connext_18); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_connext_13); - console.log(message.vali_connext_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with small value on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.smallValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_19); - assert.fail(message.fail_connext_19); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_connext_14); - console.log(message.vali_connext_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without value on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await 
xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_20); - assert.fail(message.fail_connext_20); - } catch (e) { - if ( - e.errors[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_connext_15); - console.log(message.vali_connext_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_20); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without slippage on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_21); - assert.fail(message.fail_connext_21); - } catch (e) { - if (e.message === constant.invalid_address_13) { - addContext(test, message.vali_connext_16); - console.log(message.vali_connext_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_21); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without fromChainId on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - toChainId: data.matic_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_22); - assert.fail(message.fail_connext_22); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_4 - ) { - addContext(test, message.vali_connext_17); - console.log(message.vali_connext_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_22); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without toChainId on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_23); - 
assert.fail(message.fail_connext_23); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_5 - ) { - addContext(test, message.vali_connext_18); - console.log(message.vali_connext_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_23); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with invalid transactionHash on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - toChainId: data.matic_chainid, - transactionHash: data.invalid_transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_24); - assert.fail(message.fail_connext_24); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_19); - console.log(message.vali_connext_19); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_24); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with incorrect transactionHash on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - toChainId: data.matic_chainid, - transactionHash: data.incorrect_transactionHash, - provider: BridgingProvider.Connext, - }); - - if (transactionStatus.status === constant.invalid_chainid_6) { - addContext(test, message.vali_connext_20); - console.log(message.vali_connext_20); - } else { - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without transactionHash on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - toChainId: data.matic_chainid, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_26); - assert.fail(message.fail_connext_26); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_21); - console.log(message.vali_connext_21); - } else { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_26); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/connext/oldWallet_connext.spec.js b/test/specs/mainnet/connext/oldWallet_connext.spec.js new file mode 100644 index 0000000..18871c0 --- /dev/null +++ b/test/specs/mainnet/connext/oldWallet_connext.spec.js @@ -0,0 +1,1429 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, constants, BigNumber, ethers } from 'ethers'; +import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToChainId, + randomTokenAddress, +} from '../../../utils/sharedData_mainnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let mainnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Validate the connext endpoints on the MainNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + mainnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await mainnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data service... 
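// Condensed sketch of the old-wallet setup above: the spec instantiates the
// PrimeSdk with the stored private key, checks that the derived EOA matches the
// expected address, resolves the counterfactual smart-wallet address used as the
// sender, and creates the DataUtils client for the data service calls.
const sdk = new PrimeSdk(
  { privateKey: process.env.PRIVATE_KEY },
  {
    chainId: Number(randomChainId),
    bundlerProvider: new EtherspotBundler(
      Number(randomChainId),
      process.env.BUNDLER_API_KEY
    ),
  }
);
console.log(sdk.state.EOAAddress === data.eoaAddress); // expected EOA address
const smartWalletAddress = await sdk.getCounterFactualAddress(); // expected data.sender
const dataUtilsClient = new DataUtils(process.env.DATA_API_KEY); // data service client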
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the all supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let allSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + allSupportedAssets = await dataService.getSupportedAssets({}); + + try { + assert.isNotEmpty( + allSupportedAssets[0].symbol, + message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + allSupportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].chainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + 
assert.isNotEmpty( + allSupportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let supportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + supportedAssets = await dataService.getSupportedAssets({ + chainId: randomChainId, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + supportedAssets[0].symbol, + message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + supportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.strictEqual( + supportedAssets[0].chainId, + randomChainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get quotes with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + quotes[0].data, + message.vali_connext_getQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].to, + message.vali_connext_getQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].value, + message.vali_connext_getQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } 
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transaction status with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomChainId, + toChainId: randomToChainId, + transactionHash: userOpsReceipt.receipt.transactionHash, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + transactionStatus.status, + message.vali_connext_getTransactionStatus_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.transactionHash, + message.vali_connext_getTransactionStatus_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.connextscanUrl, + message.vali_connext_getTransactionStatus_connextscanUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.invalidSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_7); + assert.fail(message.fail_connext_7); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_2); + console.log(message.vali_connext_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_8); + assert.fail(message.fail_connext_8); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_3); + console.log(message.vali_connext_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.incorrectSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_9); + assert.fail(message.fail_connext_9); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_4); + console.log(message.vali_connext_4); + } else { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_connext_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid recipient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.invalidRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_10); + assert.fail(message.fail_connext_10); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_5); + console.log(message.vali_connext_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without recipient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_11); + assert.fail(message.fail_connext_11); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_6); + console.log(message.vali_connext_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect recipient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.incorrectRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_12); + assert.fail(message.fail_connext_12); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_7); + console.log(message.vali_connext_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_12); + } + } +
}, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without fromChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_13); + assert.fail(message.fail_connext_13); + } catch (e) { + if (e.message === constant.invalid_address_9) { + addContext(test, message.vali_connext_8); + console.log(message.vali_connext_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without toChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_14); + assert.fail(message.fail_connext_14); + } catch (e) { + if (e.message === constant.invalid_address_10) { + addContext(test, message.vali_connext_9); + console.log(message.vali_connext_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_14); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomInvalidTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_15); + assert.fail(message.fail_connext_15); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_10); + console.log(message.vali_connext_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_15); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); 
+ test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomIncorrectTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_16); + assert.fail(message.fail_connext_16); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_11); + console.log(message.vali_connext_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_16); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_17); + assert.fail(message.fail_connext_17); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_12); + console.log(message.vali_connext_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.invalidValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_18); + assert.fail(message.fail_connext_18); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_connext_13); + console.log(message.vali_connext_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with small value on the ' + + 
randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.smallValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_19); + assert.fail(message.fail_connext_19); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_connext_14); + console.log(message.vali_connext_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_20); + assert.fail(message.fail_connext_20); + } catch (e) { + if ( + e.errors[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + addContext(test, message.vali_connext_15); + console.log(message.vali_connext_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_20); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without slippage on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_21); + assert.fail(message.fail_connext_21); + } catch (e) { + if (e.message === constant.invalid_address_13) { + addContext(test, message.vali_connext_16); + console.log(message.vali_connext_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_21); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without fromChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + 
transactionStatus = await dataService.getTransactionStatus({ + toChainId: randomToChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_22); + assert.fail(message.fail_connext_22); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_4 + ) { + addContext(test, message.vali_connext_17); + console.log(message.vali_connext_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_22); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without toChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_23); + assert.fail(message.fail_connext_23); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_5 + ) { + addContext(test, message.vali_connext_18); + console.log(message.vali_connext_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_23); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with invalid transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.invalid_transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_24); + assert.fail(message.fail_connext_24); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_19); + console.log(message.vali_connext_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_24); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with incorrect transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.incorrect_transactionHash, + provider: 
BridgingProvider.Connext, + }); + + if (transactionStatus.status === constant.invalid_chainid_6) { + addContext(test, message.vali_connext_20); + console.log(message.vali_connext_20); + } else { + addContext(test, message.fail_connext_25); + assert.fail(message.fail_connext_25); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_25); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_26); + assert.fail(message.fail_connext_26); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_21); + console.log(message.vali_connext_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_26); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/connext/optimism.spec.js b/test/specs/mainnet/connext/optimism.spec.js deleted file mode 100644 index b63e8f3..0000000 --- a/test/specs/mainnet/connext/optimism.spec.js +++ /dev/null @@ -1,1261 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber, ethers } from 'ethers'; -import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let optimismMainNetSdk; -let optimismEtherspotWalletAddress; -let optimismNativeAddress = null; -let optimismDataService; -let runTest; - -describe('The PrimeSDK, Validate the connext endpoints with optimism network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - optimismMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.optimism_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.optimism_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - optimismMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { -
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - optimismEtherspotWalletAddress = - await optimismMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - optimismEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - optimismDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await optimismDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === optimismNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_optimismUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the all supported assets with valid details on the optimism network', async function () { - var test = this; - let allSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - allSupportedAssets = await optimismDataService.getSupportedAssets({}); - - try { - assert.isNotEmpty( - allSupportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].chainId, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_1); - } - }, data.retry); // Retry this async test up to 5 times - } else 
{ - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the supported assets with valid details on the optimism network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - supportedAssets = await optimismDataService.getSupportedAssets({ - chainId: data.optimism_chainid, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - supportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - supportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.strictEqual( - supportedAssets[0].chainId, - data.optimism_chainid, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get quotes with valid details on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - quotes[0].data, - message.vali_connext_getQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(quotes[0].to, message.vali_connext_getQuotes_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes[0].value, - message.vali_connext_getQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transaction status with valid details on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear 
the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await optimismMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await optimismDataService.getTransactionStatus({ - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - transactionHash: userOpsReceipt.receipt.transactionHash, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - transactionStatus.status, - message.vali_connext_getTransactionStatus_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.transactionHash, - message.vali_connext_getTransactionStatus_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.connextscanUrl, - message.vali_connext_getTransactionStatus_connextscanUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the supported assets with invalid chainid on the optimism network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - try { - supportedAssets = await 
optimismDataService.getSupportedAssets({ - chainId: data.invalid_optimism_chainid, - provider: BridgingProvider.Connext, - }); - - if (supportedAssets.length === 0) { - addContext(test, message.vali_connext_1); - console.log(message.vali_connext_1); - } else { - addContext(test, message.fail_connext_5); - assert.fail(message.fail_connext_5); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid sender address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.invalidSender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_7); - assert.fail(message.fail_connext_7); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_2); - console.log(message.vali_connext_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without sender address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_8); - assert.fail(message.fail_connext_8); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_3); - console.log(message.vali_connext_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect sender address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.incorrectSender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_9); - assert.fail(message.fail_connext_9); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === 
constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_4); - console.log(message.vali_connext_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid recepient address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.invalidRecipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_10); - assert.fail(message.fail_connext_10); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_5); - console.log(message.vali_connext_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without recepient address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_11); - assert.fail(message.fail_connext_11); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_6); - console.log(message.vali_connext_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect recepient address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.incorrectRecipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_12); - assert.fail(message.fail_connext_12); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_7); - console.log(message.vali_connext_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_connext_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without fromChainid details on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_13); - assert.fail(message.fail_connext_13); - } catch (e) { - if (e.message === constant.invalid_address_9) { - addContext(test, message.vali_connext_8); - console.log(message.vali_connext_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without toChainid details on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_14); - assert.fail(message.fail_connext_14); - } catch (e) { - if (e.message === constant.invalid_address_10) { - addContext(test, message.vali_connext_9); - console.log(message.vali_connext_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid from token address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.invalidTokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_15); - assert.fail(message.fail_connext_15); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_10); - console.log(message.vali_connext_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect from token address on the optimism network', async function () { - var test = this; - let 
quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.incorrectTokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_16); - assert.fail(message.fail_connext_16); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_11); - console.log(message.vali_connext_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without from token address on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_17); - assert.fail(message.fail_connext_17); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_12); - console.log(message.vali_connext_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid value on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.invalidValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_18); - assert.fail(message.fail_connext_18); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_connext_13); - console.log(message.vali_connext_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with small value on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - 
fromAmount: utils.parseUnits(data.smallValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_19); - assert.fail(message.fail_connext_19); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_connext_14); - console.log(message.vali_connext_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without value on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_20); - assert.fail(message.fail_connext_20); - } catch (e) { - if ( - e.errors[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_connext_15); - console.log(message.vali_connext_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_20); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without slippage on the optimism network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await optimismDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_21); - assert.fail(message.fail_connext_21); - } catch (e) { - if (e.message === constant.invalid_address_13) { - addContext(test, message.vali_connext_16); - console.log(message.vali_connext_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_21); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without fromChainId on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await optimismDataService.getTransactionStatus({ - toChainId: data.matic_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_22); - assert.fail(message.fail_connext_22); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_4 - ) { - addContext(test, message.vali_connext_17); - console.log(message.vali_connext_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_connext_22); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without toChainId on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await optimismDataService.getTransactionStatus({ - fromChainId: data.invalid_optimism_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_23); - assert.fail(message.fail_connext_23); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_5 - ) { - addContext(test, message.vali_connext_18); - console.log(message.vali_connext_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_23); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with invalid transactionHash on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await optimismDataService.getTransactionStatus({ - fromChainId: data.invalid_optimism_chainid, - toChainId: data.matic_chainid, - transactionHash: data.invalid_transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_24); - assert.fail(message.fail_connext_24); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_19); - console.log(message.vali_connext_19); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_24); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with incorrect transactionHash on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await optimismDataService.getTransactionStatus({ - fromChainId: data.invalid_optimism_chainid, - toChainId: data.matic_chainid, - transactionHash: data.incorrect_transactionHash, - provider: BridgingProvider.Connext, - }); - - if (transactionStatus.status === constant.invalid_chainid_6) { - addContext(test, message.vali_connext_20); - console.log(message.vali_connext_20); - } else { - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without transactionHash on the optimism network', async function () { - var test = this; - if (runTest) { - await 
customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await optimismDataService.getTransactionStatus({ - fromChainId: data.invalid_optimism_chainid, - toChainId: data.matic_chainid, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_26); - assert.fail(message.fail_connext_26); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_21); - console.log(message.vali_connext_21); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_26); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/connext/xdai.spec.js b/test/specs/mainnet/connext/xdai.spec.js deleted file mode 100644 index a28d26b..0000000 --- a/test/specs/mainnet/connext/xdai.spec.js +++ /dev/null @@ -1,1254 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber, ethers } from 'ethers'; -import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let xdaiMainNetSdk; -let xdaiEtherspotWalletAddress; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; - -describe('The PrimeSDK, Validate the connext endpoints with xdai network on the MainNet (with old wallet)', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - xdaiMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - xdaiEtherspotWalletAddress = - await xdaiMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - xdaiEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
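For reference, the setup that repeats at the top of each of these specs reduces to the following minimal sketch, assuming the same @etherspot/prime-sdk API and environment variables used throughout this patch (PRIVATE_KEY, BUNDLER_API_KEY, DATA_API_KEY); the chain id shown is illustrative, not part of the patch.

// Minimal sketch of the per-network setup used by these specs (illustrative chain id).
import * as dotenv from 'dotenv';
dotenv.config();
import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk';

const chainId = 100; // e.g. Gnosis/xdai; any supported chain id follows the same pattern

// Instantiate the Prime SDK from an EOA private key plus a bundler provider.
const sdk = new PrimeSdk(
  { privateKey: process.env.PRIVATE_KEY },
  {
    chainId,
    bundlerProvider: new EtherspotBundler(chainId, process.env.BUNDLER_API_KEY),
  }
);

// The counterfactual address is the smart wallet ("sender") the assertions compare against.
const smartWalletAddress = await sdk.getCounterFactualAddress();
console.log('EOA:', sdk.state.EOAAddress, 'smart wallet:', smartWalletAddress);

// The data service is a separate client, keyed by its own API key.
const dataService = new DataUtils(process.env.DATA_API_KEY);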
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the all supported assets with valid details on the xdai network', async function () { - var test = this; - let allSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - allSupportedAssets = await xdaiDataService.getSupportedAssets({}); - - try { - assert.isNotEmpty( - allSupportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].chainId, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the supported assets with valid details on the xdai network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - supportedAssets = await xdaiDataService.getSupportedAssets({ - chainId: data.xdai_chainid, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - supportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - supportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.strictEqual( - supportedAssets[0].chainId, - data.xdai_chainid, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get quotes with valid details on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - quotes[0].data, - message.vali_connext_getQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(quotes[0].to, message.vali_connext_getQuotes_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes[0].value, - message.vali_connext_getQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transaction status with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get 
balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - transactionHash: userOpsReceipt.receipt.transactionHash, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - transactionStatus.status, - message.vali_connext_getTransactionStatus_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.transactionHash, - message.vali_connext_getTransactionStatus_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.connextscanUrl, - message.vali_connext_getTransactionStatus_connextscanUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid sender address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.invalidSender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_7); - assert.fail(message.fail_connext_7); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_2); - 
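The transaction-status smoke test above follows a fixed sequence: queue a transfer, estimate, send, poll for the receipt, then query bridging status. Stripped of the retry and reporting scaffolding, and with a placeholder recipient, amount, and chain ids, it might look roughly like this sketch (it would submit a real UserOp if run against funded keys):

// Rough outline of the submit-and-track flow exercised by the smoke test above.
import * as dotenv from 'dotenv';
dotenv.config();
import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk';
import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js';
import { ethers } from 'ethers';

const chainId = 100; // source chain (illustrative)
const sdk = new PrimeSdk(
  { privateKey: process.env.PRIVATE_KEY },
  { chainId, bundlerProvider: new EtherspotBundler(chainId, process.env.BUNDLER_API_KEY) }
);
const dataService = new DataUtils(process.env.DATA_API_KEY);

// 1. Start from an empty batch, then queue a simple native transfer.
await sdk.clearUserOpsFromBatch();
await sdk.addUserOpsToBatch({
  to: '0x000000000000000000000000000000000000dEaD', // placeholder recipient
  value: ethers.utils.parseEther('0.0001'),
});

// 2. Estimate the batch, then sign and submit the resulting UserOp.
const op = await sdk.estimate();
const uoHash = await sdk.send(op);

// 3. Poll for the receipt (the specs use a roughly one-minute timeout).
let receipt = null;
const deadline = Date.now() + 60_000;
while (receipt == null && Date.now() < deadline) {
  await new Promise((resolve) => setTimeout(resolve, 5_000));
  receipt = await sdk.getUserOpReceipt(uoHash);
}

// 4. Ask the data service for the bridging status of the mined transaction.
if (receipt) {
  const status = await dataService.getTransactionStatus({
    fromChainId: chainId,
    toChainId: 137, // destination chain (illustrative)
    transactionHash: receipt.receipt.transactionHash,
    provider: BridgingProvider.Connext,
  });
  console.log(status.status, status.connextscanUrl);
}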
console.log(message.vali_connext_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without sender address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_8); - assert.fail(message.fail_connext_8); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_3); - console.log(message.vali_connext_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect sender address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.incorrectSender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_9); - assert.fail(message.fail_connext_9); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_4); - console.log(message.vali_connext_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid recepient address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.invalidRecipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_10); - assert.fail(message.fail_connext_10); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_5); - console.log(message.vali_connext_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_connext_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without recepient address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_11); - assert.fail(message.fail_connext_11); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_6); - console.log(message.vali_connext_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect recepient address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.incorrectRecipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_12); - assert.fail(message.fail_connext_12); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_7); - console.log(message.vali_connext_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without fromChainid details on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_13); - assert.fail(message.fail_connext_13); - } catch (e) { - if (e.message === constant.invalid_address_9) { - addContext(test, message.vali_connext_8); - console.log(message.vali_connext_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - 
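The address-validation regression cases above all share one shape: call getQuotes with one field malformed or missing, expect the call to throw, and match the class-validator constraint text against a stored constant. A condensed sketch of that pattern follows; the recipient, token address, chain ids, and amount are illustrative values, not taken from the test data.

// Condensed shape of the negative getQuotes cases: expect a class-validator error.
import * as dotenv from 'dotenv';
dotenv.config();
import { DataUtils } from '@etherspot/prime-sdk';
import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js';
import { utils } from 'ethers';

const dataService = new DataUtils(process.env.DATA_API_KEY);

try {
  await dataService.getQuotes({
    fromAddress: '0xinvalid', // deliberately malformed sender
    toAddress: '0x000000000000000000000000000000000000dEaD', // placeholder recipient
    fromChainId: 100,
    toChainId: 137,
    fromToken: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83', // illustrative token (USDC on Gnosis)
    fromAmount: utils.parseUnits('0.01', 18),
    slippage: 0.1,
    provider: BridgingProvider.Connext,
  });
  throw new Error('expected getQuotes to reject the malformed sender');
} catch (e) {
  // The specs compare this constraint message against a constant from constant.json.
  console.log(e.errors?.[0]?.constraints?.isAddress ?? e.message);
}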
console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without toChainid details on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_14); - assert.fail(message.fail_connext_14); - } catch (e) { - if (e.message === constant.invalid_address_10) { - addContext(test, message.vali_connext_9); - console.log(message.vali_connext_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid from token address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.invalidTokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_15); - assert.fail(message.fail_connext_15); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_10); - console.log(message.vali_connext_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect from token address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.incorrectTokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_16); - assert.fail(message.fail_connext_16); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_11); - console.log(message.vali_connext_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without from 
token address on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_17); - assert.fail(message.fail_connext_17); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_12); - console.log(message.vali_connext_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid value on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.invalidValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_18); - assert.fail(message.fail_connext_18); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_connext_13); - console.log(message.vali_connext_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with small value on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.smallValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_19); - assert.fail(message.fail_connext_19); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_connext_14); - console.log(message.vali_connext_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without value on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - 
toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_20); - assert.fail(message.fail_connext_20); - } catch (e) { - if ( - e.errors[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_connext_15); - console.log(message.vali_connext_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_20); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without slippage on the xdai network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await xdaiDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_21); - assert.fail(message.fail_connext_21); - } catch (e) { - if (e.message === constant.invalid_address_13) { - addContext(test, message.vali_connext_16); - console.log(message.vali_connext_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_21); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without fromChainId on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - toChainId: data.matic_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_22); - assert.fail(message.fail_connext_22); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_4 - ) { - addContext(test, message.vali_connext_17); - console.log(message.vali_connext_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_22); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without toChainId on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_23); - assert.fail(message.fail_connext_23); - } catch (e) { - if ( - 
e.errors[0].constraints.isPositive === constant.invalid_chainid_5 - ) { - addContext(test, message.vali_connext_18); - console.log(message.vali_connext_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_23); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with invalid transactionHash on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - toChainId: data.matic_chainid, - transactionHash: data.invalid_transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_24); - assert.fail(message.fail_connext_24); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_19); - console.log(message.vali_connext_19); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_24); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with incorrect transactionHash on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - toChainId: data.matic_chainid, - transactionHash: data.incorrect_transactionHash, - provider: BridgingProvider.Connext, - }); - - if (transactionStatus.status === constant.invalid_chainid_6) { - addContext(test, message.vali_connext_20); - console.log(message.vali_connext_20); - } else { - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without transactionHash on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await xdaiDataService.getTransactionStatus({ - fromChainId: data.invalid_xdai_chainid, - toChainId: data.matic_chainid, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_26); - assert.fail(message.fail_connext_26); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_21); - console.log(message.vali_connext_21); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
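Likewise, the getTransactionStatus parameter checks above reduce to omitting or corrupting one field and then matching the reported constraint (isPositive for the chain ids, isHex for the 32-byte transaction hash). A compact sketch, with illustrative values:

// Compact shape of the getTransactionStatus validation cases.
import * as dotenv from 'dotenv';
dotenv.config();
import { DataUtils } from '@etherspot/prime-sdk';
import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js';

const dataService = new DataUtils(process.env.DATA_API_KEY);

try {
  await dataService.getTransactionStatus({
    // fromChainId deliberately omitted
    toChainId: 137,
    transactionHash: '0x1234', // too short to be a 32-byte hex hash
    provider: BridgingProvider.Connext,
  });
  throw new Error('expected getTransactionStatus to reject the request');
} catch (e) {
  // The specs assert on constraints such as isPositive (chain ids) and isHex (hash).
  for (const err of e.errors ?? []) console.log(err.constraints);
}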
assert.fail(message.fail_connext_26); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/listAndRates/arbitrum.spec.js b/test/specs/mainnet/listAndRates/arbitrum.spec.js deleted file mode 100644 index 9f1eb82..0000000 --- a/test/specs/mainnet/listAndRates/arbitrum.spec.js +++ /dev/null @@ -1,820 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let arbitrumMainNetSdk; -let arbitrumEtherspotWalletAddress; -let arbitrumNativeAddress = null; -let arbitrumDataService; -let runTest; - -describe('The PrimeSDK, when get the NFT List, Token List and Exchange Rates details with arbitrum network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - arbitrumMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.arbitrum_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.arbitrum_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - arbitrumMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - arbitrumEtherspotWalletAddress = - await arbitrumMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - arbitrumEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - arbitrumDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await arbitrumDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === arbitrumNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_arbitrumUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the NFT List on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let nfts; - try { - nfts = await arbitrumDataService.getNftList({ - chainId: Number(data.arbitrum_chainid), - account: data.sender, - }); - - if (nfts.items.length > 0) { - addContext(test, message.pass_nft_list_1); - console.log(message.pass_nftList_1); - - try { - assert.isNotEmpty( - nfts.items[0].contractAddress, - message.vali_nftList_contractAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].tokenType, - message.vali_nftList_tokenType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].balance, - message.vali_nftList_balance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].items[0].tokenId, - message.vali_nftList_items_tokenId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].items[0].amount, - message.vali_nftList_items_amount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_nftList_2); - console.log(message.pass_nftList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Token List on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let tokenLists; - let tokenListTokens; - try { - tokenLists = await arbitrumDataService.getTokenLists({ - chainId: data.arbitrum_chainid, - }); - - if (tokenLists.length > 0) { - console.log(message.pass_tokenList_1); - addContext(test, 
message.pass_tokenList_1); - - try { - assert.isNotEmpty( - tokenLists[0].name, - message.vali_tokenList_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenLists[0].endpoint, - message.vali_tokenList_endpoint - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - - tokenListTokens = await arbitrumDataService.getTokenListTokens({ - chainId: data.arbitrum_chainid, - }); - - if (tokenListTokens.length > 0) { - console.log(message.pass_tokenList_3); - addContext(test, message.pass_tokenList_3); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_tokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_tokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_tokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_tokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_tokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_tokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_4); - console.log(message.pass_tokenList_4); - } - - if (tokenLists.length > 0) { - const { name } = tokenLists[0]; - - tokenListTokens = await arbitrumDataService.getTokenListTokens({ - chainId: data.arbitrum_chainid, - name, - }); - - if (tokenListTokens.length > 0) { - addContext(test, message.pass_tokenList_5); - console.log(message.pass_tokenList_5); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_selectedTokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_selectedTokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_selectedTokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_selectedTokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_selectedTokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_selectedTokenListTokens_chainId 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_6); - console.log(message.pass_tokenList_6); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_tokenList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.tokenList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange Rates on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let TOKEN_LIST; - let rates; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_arbitrumUSDC, - data.tokenAddress_arbitrumUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.arbitrum_chainid), - }; - - rates = await arbitrumDataService.fetchExchangeRates(requestPayload); - - for (let i = 0; i < rates.items.length; i++) { - try { - assert.isNotEmpty( - rates.items[i].address, - message.vali_exchangeRates_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eth, - message.vali_exchangeRates_eth - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eur, - message.vali_exchangeRates_eur - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].gbp, - message.vali_exchangeRates_gbp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].usd, - message.vali_exchangeRates_usd - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with invalid account address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await arbitrumDataService.getNftList({ - chainId: Number(data.arbitrum_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_nftList_2); - assert.fail(message.fail_nftList_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_3); - console.log(message.pass_nftList_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with incorrect account address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async 
function () { - try { - await arbitrumDataService.getNftList({ - chainId: Number(data.arbitrum_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_nftList_3); - assert.fail(message.fail_nftList_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_4); - console.log(message.pass_nftList_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with other token address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_arbitrumUSDC, - data.tokenAddress_arbitrumUSDT, - data.tokenAddress_maticUSDC, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.arbitrum_chainid), - }; - - await arbitrumDataService.fetchExchangeRates(requestPayload); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid token address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.invalidTokenAddress_arbitrumUSDC, - data.tokenAddress_arbitrumUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.arbitrum_chainid), - }; - - await arbitrumDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_3); - assert.fail(message.fail_exchangeRates_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_1); - console.log(message.pass_exchangeRates_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with incorrect token address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.incorrectTokenAddress_arbitrumUSDC, - data.tokenAddress_arbitrumUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.arbitrum_chainid), - }; - - await arbitrumDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_4); - assert.fail(message.fail_exchangeRates_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_2); - console.log(message.pass_exchangeRates_2); - } else { - console.error(e); - const eString = e.toString(); - 
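The exchange-rate cases build a single request payload (a token list plus a chain id) and assert on the numeric quote fields of each returned item. Without the retry and reporting scaffolding, the call is roughly as follows; the token addresses are illustrative (USDC and USDT on Arbitrum), not values from the test data.

// Bare fetchExchangeRates call, without the retry and reporting scaffolding.
import * as dotenv from 'dotenv';
dotenv.config();
import { DataUtils } from '@etherspot/prime-sdk';

const dataService = new DataUtils(process.env.DATA_API_KEY);

const requestPayload = {
  // Token addresses to price (illustrative).
  tokens: [
    '0xaf88d065e77c8cC2239327C5EDb3A432268e5831',
    '0xFd086bC7CD5C481DCC9C85ebE478A1C0b69FCbb9',
  ],
  chainId: 42161, // Arbitrum One
};

const rates = await dataService.fetchExchangeRates(requestPayload);
for (const item of rates.items) {
  // Each item carries the token address plus eth/eur/gbp/usd quotes.
  console.log(item.address, item.usd);
}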
addContext(test, eString); - assert.fail(message.fail_exchangeRates_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without token address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let TOKEN_LIST = []; - - let requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.arbitrum_chainid), - }; - - await arbitrumDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_5); - assert.fail(message.fail_exchangeRates_5); - } catch (e) { - let error = e.message; - if (error.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_3); - console.log(message.pass_exchangeRates_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid chainid on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_arbitrumUSDC, - data.tokenAddress_arbitrumUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.invalid_arbitrum_chainid), - }; - - await arbitrumDataService.fetchExchangeRates(requestPayload); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_4); - console.log(message.pass_exchangeRates_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without chainid on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_arbitrumUSDC, - data.tokenAddress_arbitrumUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - }; - - await arbitrumDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_7); - assert.fail(message.fail_exchangeRates_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_5); - console.log(message.pass_exchangeRates_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/listAndRates/matic.spec.js b/test/specs/mainnet/listAndRates/matic.spec.js deleted file mode 100644 index 24d3c46..0000000 --- a/test/specs/mainnet/listAndRates/matic.spec.js +++ /dev/null @@ -1,820 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // 
init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let maticMainNetSdk; -let maticEtherspotWalletAddress; -let maticNativeAddress = null; -let maticDataService; -let runTest; - -describe('The PrimeSDK, when get the NFT List, Token List and Exchange Rates details with matic network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - maticMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.matic_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.matic_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - maticMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - maticEtherspotWalletAddress = - await maticMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - maticEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
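Each suite's before hook gates its tests on the smart wallet holding enough native and USDC balance, by walking the result of getAccountBalances and setting a runTest flag. A rough standalone sketch of that gate, with an illustrative wallet address, token address, decimals, and thresholds:

// Rough shape of the balance gate each before() hook applies.
import * as dotenv from 'dotenv';
dotenv.config();
import { DataUtils } from '@etherspot/prime-sdk';
import { utils } from 'ethers';

const dataService = new DataUtils(process.env.DATA_API_KEY);

const sender = '0x0000000000000000000000000000000000000001'; // placeholder smart wallet
const usdcAddress = '0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174'; // illustrative (USDC.e on Polygon)
const chainId = 137;

const output = await dataService.getAccountBalances({ account: sender, chainId });

let nativeFinal = 0;
let usdcFinal = 0;
for (const item of output.items) {
  if (item.token === null) {
    nativeFinal = Number(utils.formatUnits(item.balance, 18)); // native entry reports a null token
  } else if (item.token === usdcAddress) {
    usdcFinal = Number(utils.formatUnits(item.balance, 6));
  }
}

// Tests only run when both balances clear the configured minimums.
const runTest = nativeFinal > 0.001 && usdcFinal > 0.1;
console.log({ nativeFinal, usdcFinal, runTest });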
- try { - maticDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await maticDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.matic_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === maticNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_maticUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the NFT List on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let nfts; - try { - nfts = await maticDataService.getNftList({ - chainId: Number(data.matic_chainid), - account: data.sender, - }); - - if (nfts.items.length > 0) { - addContext(test, message.pass_nft_list_1); - console.log(message.pass_nftList_1); - - try { - assert.isNotEmpty( - nfts.items[0].contractAddress, - message.vali_nftList_contractAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].tokenType, - message.vali_nftList_tokenType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].balance, - message.vali_nftList_balance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].items[0].tokenId, - message.vali_nftList_items_tokenId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].items[0].amount, - message.vali_nftList_items_amount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_nftList_2); - console.log(message.pass_nftList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Token List on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let tokenLists; - let tokenListTokens; - try { - tokenLists = await maticDataService.getTokenLists({ - chainId: data.matic_chainid, - }); - - if (tokenLists.length > 0) { - console.log(message.pass_tokenList_1); - addContext(test, message.pass_tokenList_1); - - try { - 
assert.isNotEmpty( - tokenLists[0].name, - message.vali_tokenList_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenLists[0].endpoint, - message.vali_tokenList_endpoint - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - - tokenListTokens = await maticDataService.getTokenListTokens({ - chainId: data.matic_chainid, - }); - - if (tokenListTokens.length > 0) { - console.log(message.pass_tokenList_3); - addContext(test, message.pass_tokenList_3); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_tokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_tokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_tokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_tokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_tokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_tokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_4); - console.log(message.pass_tokenList_4); - } - - if (tokenLists.length > 0) { - const { name } = tokenLists[0]; - - tokenListTokens = await maticDataService.getTokenListTokens({ - chainId: data.matic_chainid, - name, - }); - - if (tokenListTokens.length > 0) { - addContext(test, message.pass_tokenList_5); - console.log(message.pass_tokenList_5); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_selectedTokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_selectedTokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_selectedTokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_selectedTokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_selectedTokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_selectedTokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_6); - console.log(message.pass_tokenList_6); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_tokenList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.tokenList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange Rates on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let TOKEN_LIST; - let rates; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_maticUSDC, - data.tokenAddress_maticUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.matic_chainid), - }; - - rates = await maticDataService.fetchExchangeRates(requestPayload); - - for (let i = 0; i < rates.items.length; i++) { - try { - assert.isNotEmpty( - rates.items[i].address, - message.vali_exchangeRates_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eth, - message.vali_exchangeRates_eth - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eur, - message.vali_exchangeRates_eur - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].gbp, - message.vali_exchangeRates_gbp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].usd, - message.vali_exchangeRates_usd - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with invalid account address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await maticDataService.getNftList({ - chainId: Number(data.matic_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_nftList_2); - assert.fail(message.fail_nftList_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_3); - console.log(message.pass_nftList_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with incorrect account address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await maticDataService.getNftList({ - chainId: 
Number(data.matic_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_nftList_3); - assert.fail(message.fail_nftList_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_4); - console.log(message.pass_nftList_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with other token address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_maticUSDC, - data.tokenAddress_maticUSDT, - data.tokenAddress_maticUSDC, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.matic_chainid), - }; - - await maticDataService.fetchExchangeRates(requestPayload); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid token address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.invalidTokenAddress_maticUSDC, - data.tokenAddress_maticUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.matic_chainid), - }; - - await maticDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_3); - assert.fail(message.fail_exchangeRates_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_1); - console.log(message.pass_exchangeRates_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with incorrect token address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.incorrectTokenAddress_maticUSDC, - data.tokenAddress_maticUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.matic_chainid), - }; - - await maticDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_4); - assert.fail(message.fail_exchangeRates_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_2); - console.log(message.pass_exchangeRates_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_4); - } - } - }, data.retry); // Retry this async test up 
to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without token address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let TOKEN_LIST = []; - - let requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.matic_chainid), - }; - - await maticDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_5); - assert.fail(message.fail_exchangeRates_5); - } catch (e) { - let error = e.message; - if (error.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_3); - console.log(message.pass_exchangeRates_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid chainid on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_maticUSDC, - data.tokenAddress_maticUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.invalid_matic_chainid), - }; - - await maticDataService.fetchExchangeRates(requestPayload); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_4); - console.log(message.pass_exchangeRates_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without chainid on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_maticUSDC, - data.tokenAddress_maticUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - }; - - await maticDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_7); - assert.fail(message.fail_exchangeRates_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_5); - console.log(message.pass_exchangeRates_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/listAndRates/newWallet_listAndRates.spec.js b/test/specs/mainnet/listAndRates/newWallet_listAndRates.spec.js new file mode 100644 index 0000000..89ec442 --- /dev/null +++ b/test/specs/mainnet/listAndRates/newWallet_listAndRates.spec.js @@ -0,0 +1,849 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils } from 
'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToTokenAddress, + randomTokenAddress, + randomTokenAddressUsdt, +} from '../../../utils/sharedData_mainnet.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let mainnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Validate the NFT List, Token List and Exchange Rates details on the MainNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... 
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the NFT List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let nfts; + try { + nfts = await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.sender, + }); + + if (nfts.items.length > 0) { + addContext(test, message.pass_nft_list_1); + console.log(message.pass_nftList_1); + + try { + assert.isNotEmpty( + nfts.items[0].contractAddress, + message.vali_nftList_contractAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].tokenType, + message.vali_nftList_tokenType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].balance, + message.vali_nftList_balance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].items[0].tokenId, + message.vali_nftList_items_tokenId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].items[0].amount, + message.vali_nftList_items_amount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_nftList_2); + console.log(message.pass_nftList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Token List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let tokenLists; + let tokenListTokens; + try { + tokenLists = await dataService.getTokenLists({ + chainId: randomChainId, + }); + + if (tokenLists.length 
> 0) { + console.log(message.pass_tokenList_1); + addContext(test, message.pass_tokenList_1); + + try { + assert.isNotEmpty( + tokenLists[0].name, + message.vali_tokenList_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenLists[0].endpoint, + message.vali_tokenList_endpoint + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + }); + + if (tokenListTokens.length > 0) { + console.log(message.pass_tokenList_3); + addContext(test, message.pass_tokenList_3); + + try { + assert.isNotEmpty( + tokenListTokens[0].address, + message.vali_tokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_tokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_tokenListTokens_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_tokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_tokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + message.vali_tokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_4); + console.log(message.pass_tokenList_4); + } + + if (tokenLists.length > 0) { + const { name } = tokenLists[0]; + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + name, + }); + + if (tokenListTokens.length > 0) { + addContext(test, message.pass_tokenList_5); + console.log(message.pass_tokenList_5); + + try { + assert.isNotEmpty( + tokenListTokens[0].address, + message.vali_selectedTokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_selectedTokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_selectedTokenListTokens_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_selectedTokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_selectedTokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + 
message.vali_selectedTokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_6); + console.log(message.pass_tokenList_6); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_tokenList_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.tokenList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange Rates on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let TOKEN_LIST; + let rates; + let requestPayload; + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + rates = await dataService.fetchExchangeRates(requestPayload); + + for (let i = 0; i < rates.items.length; i++) { + try { + assert.isNotEmpty( + rates.items[i].address, + message.vali_exchangeRates_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eth, + message.vali_exchangeRates_eth + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eur, + message.vali_exchangeRates_eur + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].gbp, + message.vali_exchangeRates_gbp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].usd, + message.vali_exchangeRates_usd + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with invalid account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_nftList_2); + assert.fail(message.fail_nftList_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_3); + console.log(message.pass_nftList_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + 
console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with incorrect account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_nftList_3); + assert.fail(message.fail_nftList_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_4); + console.log(message.pass_nftList_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with other token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [ + randomTokenAddress, + randomTokenAddressUsdt, + randomToTokenAddress, + ]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [randomInvalidTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_3); + assert.fail(message.fail_exchangeRates_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_1); + console.log(message.pass_exchangeRates_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with incorrect token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomIncorrectTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await 
dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_4); + assert.fail(message.fail_exchangeRates_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_2); + console.log(message.pass_exchangeRates_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let TOKEN_LIST = []; + + let requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_5); + assert.fail(message.fail_exchangeRates_5); + } catch (e) { + let error = e.message; + if (error.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_3); + console.log(message.pass_exchangeRates_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomInvalidChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_4); + console.log(message.pass_exchangeRates_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_7); + assert.fail(message.fail_exchangeRates_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_5); + console.log(message.pass_exchangeRates_5); + } else { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/listAndRates/newWallet_xdai.spec.js b/test/specs/mainnet/listAndRates/newWallet_xdai.spec.js deleted file mode 100644 index 3de92fd..0000000 --- a/test/specs/mainnet/listAndRates/newWallet_xdai.spec.js +++ /dev/null @@ -1,797 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import testUtils from '../../../utils/testUtils.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; -import { dirname } from 'path'; -import { fileURLToPath } from 'url'; -import fs from 'fs'; -import path from 'path'; - -let xdaiMainNetSdk; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; -const __dirname = dirname(fileURLToPath(import.meta.url)); - -describe('The PrimeSDK, when get the NFT List, Token List and Exchange Rates details with xdai network on the MainNet (with new wallet)', function () { - before(async function () { - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: sharedState.newPrivateKey }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // initializating Data service... 
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - }, data.retry); // Retry this async test up to 5 times - }); - - beforeEach(async function () { - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }); - - it('SMOKE: Validate the NFT List on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let nfts; - try { - nfts = await xdaiDataService.getNftList({ - chainId: Number(data.xdai_chainid), - account: data.sender, - }); - - if (nfts.items.length > 0) { - addContext(test, message.pass_nft_list_1); - console.log(message.pass_nftList_1); - - try { - assert.isNotEmpty( - nfts.items[0].contractAddress, - message.vali_nftList_contractAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].tokenType, - message.vali_nftList_tokenType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].balance, - message.vali_nftList_balance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].items[0].tokenId, - message.vali_nftList_items_tokenId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].items[0].amount, - message.vali_nftList_items_amount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_nftList_2); - console.log(message.pass_nftList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Token List on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let tokenLists; - let tokenListTokens; - try { - tokenLists = await xdaiDataService.getTokenLists({ - chainId: data.xdai_chainid, - }); - - if (tokenLists.length > 0) { - 
console.log(message.pass_tokenList_1); - addContext(test, message.pass_tokenList_1); - - try { - assert.isNotEmpty( - tokenLists[0].name, - message.vali_tokenList_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenLists[0].endpoint, - message.vali_tokenList_endpoint - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - - tokenListTokens = await xdaiDataService.getTokenListTokens({ - chainId: data.xdai_chainid, - }); - - if (tokenListTokens.length > 0) { - console.log(message.pass_tokenList_3); - addContext(test, message.pass_tokenList_3); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_tokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_tokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_tokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_tokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_tokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_tokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_4); - console.log(message.pass_tokenList_4); - } - - if (tokenLists.length > 0) { - const { name } = tokenLists[0]; - - tokenListTokens = await xdaiDataService.getTokenListTokens({ - chainId: data.xdai_chainid, - name, - }); - - if (tokenListTokens.length > 0) { - addContext(test, message.pass_tokenList_5); - console.log(message.pass_tokenList_5); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_selectedTokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_selectedTokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_selectedTokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_selectedTokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_selectedTokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - 
message.vali_selectedTokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_6); - console.log(message.pass_tokenList_6); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_tokenList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.tokenList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange Rates on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let TOKEN_LIST; - let rates; - let requestPayload; - try { - TOKEN_LIST = [data.tokenAddress_xdaiUSDC, data.tokenAddress_xdaiUSDT]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - rates = await xdaiDataService.fetchExchangeRates(requestPayload); - - for (let i = 0; i < rates.items.length; i++) { - try { - assert.isNotEmpty( - rates.items[i].address, - message.vali_exchangeRates_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eth, - message.vali_exchangeRates_eth - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eur, - message.vali_exchangeRates_eur - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].gbp, - message.vali_exchangeRates_gbp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].usd, - message.vali_exchangeRates_usd - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with invalid account address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await xdaiDataService.getNftList({ - chainId: Number(data.xdai_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_nftList_2); - assert.fail(message.fail_nftList_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_3); - console.log(message.pass_nftList_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - 
}); - - it('REGRESSION: Validate the NFT List with incorrect account address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await xdaiDataService.getNftList({ - chainId: Number(data.xdai_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_nftList_3); - assert.fail(message.fail_nftList_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_4); - console.log(message.pass_nftList_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with other token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_xdaiUSDC, - data.tokenAddress_xdaiUSDT, - data.tokenAddress_maticUSDC, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.invalidTokenAddress_xdaiUSDC, - data.tokenAddress_xdaiUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_3); - assert.fail(message.fail_exchangeRates_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_1); - console.log(message.pass_exchangeRates_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with incorrect token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.incorrectTokenAddress_xdaiUSDC, - data.tokenAddress_xdaiUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_4); - 
assert.fail(message.fail_exchangeRates_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_2); - console.log(message.pass_exchangeRates_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let TOKEN_LIST = []; - - let requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_5); - assert.fail(message.fail_exchangeRates_5); - } catch (e) { - let error = e.message; - if (error.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_3); - console.log(message.pass_exchangeRates_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid chainid on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [data.tokenAddress_xdaiUSDC, data.tokenAddress_xdaiUSDT]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.invalid_xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_4); - console.log(message.pass_exchangeRates_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without chainid on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [data.tokenAddress_xdaiUSDC, data.tokenAddress_xdaiUSDT]; - - requestPayload = { - tokens: TOKEN_LIST, - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_7); - assert.fail(message.fail_exchangeRates_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_5); - console.log(message.pass_exchangeRates_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_7); - } - } - }, data.retry); // 
Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/listAndRates/oldWallet_listAndRates.spec.js b/test/specs/mainnet/listAndRates/oldWallet_listAndRates.spec.js new file mode 100644 index 0000000..96afba9 --- /dev/null +++ b/test/specs/mainnet/listAndRates/oldWallet_listAndRates.spec.js @@ -0,0 +1,918 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToTokenAddress, + randomTokenAddress, + randomTokenAddressUsdt, +} from '../../../utils/sharedData_mainnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let mainnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Validate the NFT List, Token List and Exchange Rates details on the MainNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + mainnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await mainnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data service... 
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the NFT List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let nfts; + try { + nfts = await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.sender, + }); + + if (nfts.items.length > 0) { + addContext(test, message.pass_nft_list_1); + console.log(message.pass_nftList_1); + + try { + assert.isNotEmpty( + nfts.items[0].contractAddress, + message.vali_nftList_contractAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].tokenType, + message.vali_nftList_tokenType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].balance, + message.vali_nftList_balance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].items[0].tokenId, + message.vali_nftList_items_tokenId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].items[0].amount, + message.vali_nftList_items_amount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_nftList_2); + console.log(message.pass_nftList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Token List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let tokenLists; + let tokenListTokens; + try { + tokenLists = await dataService.getTokenLists({ + chainId: randomChainId, + }); + + if (tokenLists.length > 0) { + console.log(message.pass_tokenList_1); + addContext(test, message.pass_tokenList_1); + + try { + assert.isNotEmpty( + tokenLists[0].name, + message.vali_tokenList_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenLists[0].endpoint, + message.vali_tokenList_endpoint + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + }); + + if (tokenListTokens.length > 0) { + console.log(message.pass_tokenList_3); + addContext(test, message.pass_tokenList_3); + + try { + assert.isNotEmpty( + tokenListTokens[0].address, + message.vali_tokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_tokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_tokenListTokens_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_tokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_tokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + message.vali_tokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_4); + console.log(message.pass_tokenList_4); + } + + if (tokenLists.length > 0) { + const { name } = tokenLists[0]; + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + name, + }); + + if (tokenListTokens.length > 0) { + addContext(test, message.pass_tokenList_5); + console.log(message.pass_tokenList_5); + + try { + assert.isNotEmpty( + tokenListTokens[0].address, + 
message.vali_selectedTokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_selectedTokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_selectedTokenListTokens_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_selectedTokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_selectedTokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + message.vali_selectedTokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_6); + console.log(message.pass_tokenList_6); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_tokenList_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.tokenList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange Rates on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let TOKEN_LIST; + let rates; + let requestPayload; + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + rates = await dataService.fetchExchangeRates(requestPayload); + + for (let i = 0; i < rates.items.length; i++) { + try { + assert.isNotEmpty( + rates.items[i].address, + message.vali_exchangeRates_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eth, + message.vali_exchangeRates_eth + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eur, + message.vali_exchangeRates_eur + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].gbp, + message.vali_exchangeRates_gbp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].usd, + message.vali_exchangeRates_usd + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, 
message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with invalid account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_nftList_2); + assert.fail(message.fail_nftList_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_3); + console.log(message.pass_nftList_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with incorrect account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_nftList_3); + assert.fail(message.fail_nftList_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_4); + console.log(message.pass_nftList_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with other token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [ + randomTokenAddress, + randomTokenAddressUsdt, + randomToTokenAddress, + ]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [randomInvalidTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_3); + 
assert.fail(message.fail_exchangeRates_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_1); + console.log(message.pass_exchangeRates_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with incorrect token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomIncorrectTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_4); + assert.fail(message.fail_exchangeRates_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_2); + console.log(message.pass_exchangeRates_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let TOKEN_LIST = []; + + let requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_5); + assert.fail(message.fail_exchangeRates_5); + } catch (e) { + let error = e.message; + if (error.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_3); + console.log(message.pass_exchangeRates_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomInvalidChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_4); + console.log(message.pass_exchangeRates_4); + } else { + console.error(e); + const eString = e.toString(); 
+ addContext(test, eString); + assert.fail(message.fail_exchangeRates_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_7); + assert.fail(message.fail_exchangeRates_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_5); + console.log(message.pass_exchangeRates_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/listAndRates/optimism.spec.js b/test/specs/mainnet/listAndRates/optimism.spec.js deleted file mode 100644 index 4c0375f..0000000 --- a/test/specs/mainnet/listAndRates/optimism.spec.js +++ /dev/null @@ -1,820 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let optimismMainNetSdk; -let optimismEtherspotWalletAddress; -let optimismNativeAddress = null; -let optimismDataService; -let runTest; - -describe('The PrimeSDK, when get the NFT List, Token List and Exchange Rates details with optimism network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - optimismMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.optimism_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.optimism_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - optimismMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - optimismEtherspotWalletAddress = - await optimismMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - optimismEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - 
} catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - optimismDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await optimismDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === optimismNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_optimismUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the NFT List on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let nfts; - try { - nfts = await optimismDataService.getNftList({ - chainId: Number(data.optimism_chainid), - account: data.sender, - }); - - if (nfts.items.length > 0) { - addContext(test, message.pass_nft_list_1); - console.log(message.pass_nftList_1); - - try { - assert.isNotEmpty( - nfts.items[0].contractAddress, - message.vali_nftList_contractAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].tokenType, - message.vali_nftList_tokenType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].balance, - message.vali_nftList_balance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].items[0].tokenId, - message.vali_nftList_items_tokenId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].items[0].amount, - message.vali_nftList_items_amount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_nftList_2); - console.log(message.pass_nftList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Token List on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function 
() { - helper.wait(data.mediumTimeout); - - let tokenLists; - let tokenListTokens; - try { - tokenLists = await optimismDataService.getTokenLists({ - chainId: data.optimism_chainid, - }); - - if (tokenLists.length > 0) { - console.log(message.pass_tokenList_1); - addContext(test, message.pass_tokenList_1); - - try { - assert.isNotEmpty( - tokenLists[0].name, - message.vali_tokenList_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenLists[0].endpoint, - message.vali_tokenList_endpoint - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - - tokenListTokens = await optimismDataService.getTokenListTokens({ - chainId: data.optimism_chainid, - }); - - if (tokenListTokens.length > 0) { - console.log(message.pass_tokenList_3); - addContext(test, message.pass_tokenList_3); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_tokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_tokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_tokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_tokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_tokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_tokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_4); - console.log(message.pass_tokenList_4); - } - - if (tokenLists.length > 0) { - const { name } = tokenLists[0]; - - tokenListTokens = await optimismDataService.getTokenListTokens({ - chainId: data.optimism_chainid, - name, - }); - - if (tokenListTokens.length > 0) { - addContext(test, message.pass_tokenList_5); - console.log(message.pass_tokenList_5); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_selectedTokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_selectedTokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_selectedTokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_selectedTokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
tokenListTokens[0].logoURI, - message.vali_selectedTokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_selectedTokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_6); - console.log(message.pass_tokenList_6); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_tokenList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.tokenList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange Rates on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let TOKEN_LIST; - let rates; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_optimismUSDC, - data.tokenAddress_optimismUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.optimism_chainid), - }; - - rates = await optimismDataService.fetchExchangeRates(requestPayload); - - for (let i = 0; i < rates.items.length; i++) { - try { - assert.isNotEmpty( - rates.items[i].address, - message.vali_exchangeRates_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eth, - message.vali_exchangeRates_eth - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eur, - message.vali_exchangeRates_eur - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].gbp, - message.vali_exchangeRates_gbp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].usd, - message.vali_exchangeRates_usd - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with invalid account address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await optimismDataService.getNftList({ - chainId: Number(data.optimism_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_nftList_2); - assert.fail(message.fail_nftList_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_3); - console.log(message.pass_nftList_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_2); - } - } - }, data.retry); // Retry this async test up to 
5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with incorrect account address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await optimismDataService.getNftList({ - chainId: Number(data.optimism_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_nftList_3); - assert.fail(message.fail_nftList_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_4); - console.log(message.pass_nftList_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with other token address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_optimismUSDC, - data.tokenAddress_optimismUSDT, - data.tokenAddress_maticUSDC, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.optimism_chainid), - }; - - await optimismDataService.fetchExchangeRates(requestPayload); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid token address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.invalidTokenAddress_optimismUSDC, - data.tokenAddress_optimismUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.optimism_chainid), - }; - - await optimismDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_3); - assert.fail(message.fail_exchangeRates_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_1); - console.log(message.pass_exchangeRates_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with incorrect token address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.incorrectTokenAddress_optimismUSDC, - data.tokenAddress_optimismUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.optimism_chainid), - }; - - await optimismDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_4); - 
assert.fail(message.fail_exchangeRates_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_2); - console.log(message.pass_exchangeRates_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without token address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let TOKEN_LIST = []; - - let requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.optimism_chainid), - }; - - await optimismDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_5); - assert.fail(message.fail_exchangeRates_5); - } catch (e) { - let error = e.message; - if (error.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_3); - console.log(message.pass_exchangeRates_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid chainid on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_optimismUSDC, - data.tokenAddress_optimismUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.invalid_optimism_chainid), - }; - - await optimismDataService.fetchExchangeRates(requestPayload); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_4); - console.log(message.pass_exchangeRates_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without chainid on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_optimismUSDC, - data.tokenAddress_optimismUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - }; - - await optimismDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_7); - assert.fail(message.fail_exchangeRates_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_5); - console.log(message.pass_exchangeRates_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - 
} - }); -}); diff --git a/test/specs/mainnet/listAndRates/xdai.spec.js b/test/specs/mainnet/listAndRates/xdai.spec.js deleted file mode 100644 index 8230258..0000000 --- a/test/specs/mainnet/listAndRates/xdai.spec.js +++ /dev/null @@ -1,822 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let xdaiMainNetSdk; -let xdaiEtherspotWalletAddress; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; - -describe('The PrimeSDK, when get the NFT List, Token List and Exchange Rates details with xdai network on the MainNet (with old wallet)', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - xdaiMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - xdaiEtherspotWalletAddress = - await xdaiMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - xdaiEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the NFT List on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let nfts; - try { - nfts = await xdaiDataService.getNftList({ - chainId: Number(data.xdai_chainid), - account: data.sender, - }); - - if (nfts.items.length > 0) { - addContext(test, message.pass_nft_list_1); - console.log(message.pass_nftList_1); - - try { - assert.isNotEmpty( - nfts.items[0].contractAddress, - message.vali_nftList_contractAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].tokenType, - message.vali_nftList_tokenType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].balance, - message.vali_nftList_balance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].items[0].tokenId, - message.vali_nftList_items_tokenId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].items[0].amount, - message.vali_nftList_items_amount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_nftList_2); - console.log(message.pass_nftList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Token List on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let tokenLists; - let tokenListTokens; - try { - tokenLists = await xdaiDataService.getTokenLists({ - chainId: data.xdai_chainid, - }); - - if (tokenLists.length > 0) { - console.log(message.pass_tokenList_1); - 
addContext(test, message.pass_tokenList_1); - - try { - assert.isNotEmpty( - tokenLists[0].name, - message.vali_tokenList_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenLists[0].endpoint, - message.vali_tokenList_endpoint - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - - tokenListTokens = await xdaiDataService.getTokenListTokens({ - chainId: data.xdai_chainid, - }); - - if (tokenListTokens.length > 0) { - console.log(message.pass_tokenList_3); - addContext(test, message.pass_tokenList_3); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_tokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_tokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_tokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_tokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_tokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_tokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_4); - console.log(message.pass_tokenList_4); - } - - if (tokenLists.length > 0) { - const { name } = tokenLists[0]; - - tokenListTokens = await xdaiDataService.getTokenListTokens({ - chainId: data.xdai_chainid, - name, - }); - - if (tokenListTokens.length > 0) { - addContext(test, message.pass_tokenList_5); - console.log(message.pass_tokenList_5); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_selectedTokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_selectedTokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_selectedTokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_selectedTokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_selectedTokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_selectedTokenListTokens_chainId 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_6); - console.log(message.pass_tokenList_6); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_tokenList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.tokenList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange Rates on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let TOKEN_LIST; - let rates; - let requestPayload; - try { - TOKEN_LIST = [data.tokenAddress_xdaiUSDC, data.tokenAddress_xdaiUSDT]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - rates = await xdaiDataService.fetchExchangeRates(requestPayload); - - for (let i = 0; i < rates.items.length; i++) { - try { - assert.isNotEmpty( - rates.items[i].address, - message.vali_exchangeRates_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eth, - message.vali_exchangeRates_eth - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eur, - message.vali_exchangeRates_eur - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].gbp, - message.vali_exchangeRates_gbp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].usd, - message.vali_exchangeRates_usd - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with invalid account address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await xdaiDataService.getNftList({ - chainId: Number(data.xdai_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_nftList_2); - assert.fail(message.fail_nftList_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_3); - console.log(message.pass_nftList_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List 
with incorrect account address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await xdaiDataService.getNftList({ - chainId: Number(data.xdai_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_nftList_3); - assert.fail(message.fail_nftList_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_4); - console.log(message.pass_nftList_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with other token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_xdaiUSDC, - data.tokenAddress_xdaiUSDT, - data.tokenAddress_maticUSDC, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.invalidTokenAddress_xdaiUSDC, - data.tokenAddress_xdaiUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_3); - assert.fail(message.fail_exchangeRates_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_1); - console.log(message.pass_exchangeRates_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with incorrect token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.incorrectTokenAddress_xdaiUSDC, - data.tokenAddress_xdaiUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_4); - assert.fail(message.fail_exchangeRates_4); - } catch 
(e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_2); - console.log(message.pass_exchangeRates_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without token address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let TOKEN_LIST = []; - - let requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_5); - assert.fail(message.fail_exchangeRates_5); - } catch (e) { - let error = e.message; - if (error.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_3); - console.log(message.pass_exchangeRates_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid chainid on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [data.tokenAddress_xdaiUSDC, data.tokenAddress_xdaiUSDT]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.invalid_xdai_chainid), - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_4); - console.log(message.pass_exchangeRates_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without chainid on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [data.tokenAddress_xdaiUSDC, data.tokenAddress_xdaiUSDT]; - - requestPayload = { - tokens: TOKEN_LIST, - }; - - await xdaiDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_7); - assert.fail(message.fail_exchangeRates_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_5); - console.log(message.pass_exchangeRates_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/paymaster/arbitrum.spec.js b/test/specs/mainnet/paymaster/arbitrum.spec.js deleted file mode 100644 index 82e8135..0000000 --- a/test/specs/mainnet/paymaster/arbitrum.spec.js +++ /dev/null @@ -1,4240 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { - PrimeSdk, - DataUtils, - EtherspotBundler, - ArkaPaymaster, -} from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let arbitrumMainNetSdk; -let arbitrumEtherspotWalletAddress; -let arbitrumNativeAddress = null; -let arbitrumDataService; -let arkaPaymaster; -let runTest; - -/* eslint-disable prettier/prettier */ -describe('The PrimeSDK, when transaction with arka and pimlico paymasters with arbitrum network on the MainNet.', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - arbitrumMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.arbitrum_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.arbitrum_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - arbitrumMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - arbitrumEtherspotWalletAddress = - await arbitrumMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - arbitrumEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - arbitrumDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // initializating ArkaPaymaster... 
- try { - arkaPaymaster = new ArkaPaymaster( - Number(data.arbitrum_chainid), - process.env.API_KEY, - data.paymaster_arka - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_arka_initialize); - } - - // validate the balance of the wallet - try { - let output = await arbitrumDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === arbitrumNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_arbitrumUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token on arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `https://arka.etherspot.io?apiKey=${ - process.env.API_KEY - }&chainId=${Number(data.arbitrum_chainid)}`, - context: { mode: 'sponsor' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - 
message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer token with arka pimlico paymaster on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - // get balance of the account address - try { - balance 
= await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /** - * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend - * from the paymaster address on behalf of you. - */ - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - let uoHash1; - let transactionBatch; - let op; - let uoHash; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - try { - assert.isNotEmpty( - paymasterAddress, - message.vali_pimlico_paymasterAddress_1 - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - let contract; - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - - contract = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - - try { - assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(contract.data, message.vali_erc20Contract_data); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get estimation of transaction - try { - approveOp = await arbitrumMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - approveOp.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try 
{ - assert.isNotEmpty( - approveOp.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - uoHash1 = await arbitrumMainNetSdk.send(approveOp); - - try { - assert.isNotEmpty(uoHash1, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await arbitrumMainNetSdk.estimate({ 
- paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... 
- try { - uoHash = await arbitrumMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - } else { - addContext(test, message.fail_paymasterAddress_1); - assert.fail(message.fail_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - let transactionBatch; - let op; - let uoHash; - - // get balance of the account address - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the 
batch and get the fee data for the UserOp - try { - op = await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... 
- try { - uoHash = await arbitrumMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the metadata of the arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the metadata - try { - let metadata = await arkaPaymaster.metadata(); - - try { - assert.isNotEmpty( - metadata.sponsorAddress, - message.vali_metadata_sponsorAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.sponsorWalletBalance, - message.vali_metadata_sponsorWalletBalance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.chainsSupported, - message.vali_metadata_chainsSupported - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.tokenPaymasters, - message.vali_metadata_tokenPaymasters - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_metadata_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get token paymaster address function of the arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the get token paymaster address - try { - let getTokenPaymasterAddress = - await arkaPaymaster.getTokenPaymasterAddress('USDC'); - - try { - assert.isNotEmpty( - getTokenPaymasterAddress, - message.vali_getTokenPaymasterAddress_tokenPaymasterAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the remove whitelist address function of the arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the remove whitelist address - try { - let removeWhitelist = await arkaPaymaster.removeWhitelist([ - data.sender, - ]); - - if (removeWhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - 
console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the add whitelist address function of the arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the add whitelist address - try { - let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); - - if (addWhitelist.includes(constant.add_whitelist_1)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the check whitelist function of the arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the whilelist address - try { - let checkWhitelist = await arkaPaymaster.checkWhitelist(data.sender); - - if (checkWhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkWhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the deposit function of the arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the deposit - try { - let deposit = await arkaPaymaster.deposit(data.value); - - if (deposit.includes(constant.deposit_1)) { - addContext(test, message.vali_deposit_1); - console.log(message.vali_deposit_1); - } else { - addContext(test, message.fail_deposit_1); - assert.fail(message.fail_deposit_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid arka paymaster url on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: data.invalid_paymaster_arka, // invalid URL - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - if (e.message === constant.not_found) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INVALID_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INCORRECT_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_5); - assert.fail(message.fail_estimateTransaction_5); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_4); - console.log(message.vali_estimateTransaction_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without API Key of arka paymaster on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - // without api_key - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - 
addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the arbitrum network', async function () { - var test = this; - const invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - if (paymasterAddress.includes(constant.not_found_2)) { - addContext(test, message.vali_pimlico_paymasterAddress_2); - console.log(message.vali_pimlico_paymasterAddress_2); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_2); - assert.fail(message.fail_pimlico_paymasterAddress_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_3); - console.log(message.vali_pimlico_paymasterAddress_3); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_3); - assert.fail(message.fail_pimlico_paymasterAddress_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?chainId=${Number(data.arbitrum_chainid)}`; 
// without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_4); - console.log(message.vali_pimlico_paymasterAddress_4); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_4); - assert.fail(message.fail_pimlico_paymasterAddress_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_arbitrum_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_3) { - addContext(test, message.vali_pimlico_paymasterAddress_5); - console.log(message.vali_pimlico_paymasterAddress_5); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_5); - assert.fail(message.fail_pimlico_paymasterAddress_5); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - // without chainid in queryString - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_6); - console.log(message.vali_pimlico_paymasterAddress_6); - } 
else { - addContext(test, message.fail_pimlico_paymasterAddress_6); - assert.fail(message.fail_pimlico_paymasterAddress_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.invalidEntryPointAddress, // invalid entry point address - { token: data.usdc_token }, - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - const errorMessage = returnedValue.error; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_pimlico_paymasterAddress_7); - console.log(message.vali_pimlico_paymasterAddress_7); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_7); - assert.fail(message.fail_pimlico_paymasterAddress_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: data.invalid_usdc_token }, // invalid token - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_1) { - addContext(test, message.vali_pimlico_paymasterAddress_8); - console.log(message.vali_pimlico_paymasterAddress_8); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_8); - assert.fail(message.fail_pimlico_paymasterAddress_8); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - 
method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [], // without parametets - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_9); - console.log(message.vali_pimlico_paymasterAddress_9); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_9); - assert.fail(message.fail_pimlico_paymasterAddress_9); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.incorrectTokenAddress_arbitrumUSDC, // incorrect token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_1); - console.log(message.vali_erc20Contract_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: 
[data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.invalidTokenAddress_arbitrumUSDC, // invalid token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_2); - console.log(message.vali_erc20Contract_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Contract_3); - console.log(message.vali_erc20Contract_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_5); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the 
erc20 contract on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Contract_4); - console.log(message.vali_erc20Contract_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get 
the UserOp Hash - try { - approveOp = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await arbitrumMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.invalidValue), - }); - - addContext(test, message.fail_addTransaction_2); - assert.fail(message.fail_addTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_value_1)) { - addContext(test, vali_addTransaction_1); - console.log(vali_addTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await arbitrumMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.arbitrum_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: 
data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await arbitrumMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - let invalid_queryString = `?chainId=${Number(data.arbitrum_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if 
(utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await arbitrumMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_arbitrum_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 
'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await arbitrumMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await 
customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await arbitrumMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the 
arbitrum network', async function () { - var test = this; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.arbitrum_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.arbitrum_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?chainId=${Number(data.arbitrum_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are 
optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_arbitrum_chainid - )}`; // invalid ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - 
assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the arbitrum network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await arbitrumMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the arbitrum network', async function () { - var test = this; - 
if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(data.invalid_usdc_token); - - addContext(test, message.fail_getTokenPaymasterAddress_2); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_1)) { - addContext(test, message.vali_getTokenPaymasterAddress_1); - console.log(message.vali_getTokenPaymasterAddress_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(); - - addContext(test, message.fail_getTokenPaymasterAddress_3); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_getTokenPaymasterAddress_2); - console.log(message.vali_getTokenPaymasterAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.invalidSender]); - - addContext(test, message.fail_removeWhitelist_3); - assert.fail(message.fail_removeWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_3); - console.log(message.vali_removeWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.incorrectSender]); - - addContext(test, message.fail_removeWhitelist_4); - assert.fail(message.fail_removeWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_4); - console.log(message.vali_removeWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_removeWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([randomAddress.address]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress.address, - data.sender, - ]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // make whitelisted addresses - await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - // remove whitelist addresses - let removewhitelist = await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (removewhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - addContext(test, message.fail_removeWhitelist_6); - assert.fail(message.fail_removeWhitelist_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.invalidSender]); - - addContext(test, message.fail_addWhitelist_3); - assert.fail(message.fail_addWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_3); - console.log(message.vali_addWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.incorrectSender]); - - addContext(test, message.fail_addWhitelist_4); - assert.fail(message.fail_addWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_4); - console.log(message.vali_addWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let addwhitelist = await 
arkaPaymaster.addWhitelist([ - randomAddress.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_5); - console.log(message.vali_addWhitelist_5); - } else { - addContext(test, message.fail_addWhitelist_7); - assert.fail(message.fail_addWhitelist_7); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.addWhitelist([ - randomAddress.address, - data.sender, - ]); - - if (addWhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // add whitelist addresses - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the 
whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_2); - assert.fail(message.fail_checkWhitelist_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_1); - console.log(message.vali_checkWhitelist_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_3); - assert.fail(message.fail_checkWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_2); - console.log(message.vali_checkWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let checkwhitelist = await arkaPaymaster.checkWhitelist( - randomAddress.address - ); - - if (checkwhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkwhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster without address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(); - - addContext(test, message.fail_checkWhitelist_5); - assert.fail(message.fail_checkWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_3); - console.log(message.vali_checkWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } 
- }); - - it('REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the deposit - try { - await arkaPaymaster.deposit('one'); - - addContext(test, message.fail_deposit_3); - assert.fail(message.fail_deposit_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_deposit_2); - console.log(message.vali_deposit_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/paymaster/matic.spec.js b/test/specs/mainnet/paymaster/matic.spec.js deleted file mode 100644 index b129446..0000000 --- a/test/specs/mainnet/paymaster/matic.spec.js +++ /dev/null @@ -1,4240 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { - PrimeSdk, - DataUtils, - EtherspotBundler, - ArkaPaymaster, -} from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let maticMainNetSdk; -let maticEtherspotWalletAddress; -let maticNativeAddress = null; -let maticDataService; -let arkaPaymaster; -let runTest; - -/* eslint-disable prettier/prettier */ -describe('The PrimeSDK, when transaction with arka and pimlico paymasters with matic network on the MainNet.', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - maticMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.matic_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.matic_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - maticMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - maticEtherspotWalletAddress = - await maticMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - maticEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
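Stripped of the retry wrapper and per-field assertions, the setup performed by this before() hook amounts to the sketch below; the chain id and env variable names come from the spec itself, everything else is a placeholder.

import * as dotenv from 'dotenv';
dotenv.config();
import { PrimeSdk, DataUtils, EtherspotBundler, ArkaPaymaster } from '@etherspot/prime-sdk';

const chainId = 137; // data.matic_chainid in this spec

// Smart-wallet SDK backed by the Etherspot bundler
const sdk = new PrimeSdk(
  { privateKey: process.env.PRIVATE_KEY },
  { chainId, bundlerProvider: new EtherspotBundler(chainId, process.env.BUNDLER_API_KEY) }
);
console.log('EOA:', sdk.state.EOAAddress);                           // compared with data.eoaAddress
console.log('smart wallet:', await sdk.getCounterFactualAddress());  // compared with data.sender

// Data service (balances) and the arka paymaster admin client
const dataService = new DataUtils(process.env.DATA_API_KEY);
const arkaPaymaster = new ArkaPaymaster(chainId, process.env.API_KEY, 'https://arka.etherspot.io');

// The transactional cases only run when the wallet holds enough native + USDC;
// each returned item has { token, balance }, formatted with utils.formatUnits in the spec
const { items } = await dataService.getAccountBalances({
  account: await sdk.getCounterFactualAddress(),  // data.sender in the spec
  chainId,
});
console.log(items.length, 'token balances found');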
- try { - maticDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // initializating ArkaPaymaster... - try { - arkaPaymaster = new ArkaPaymaster( - Number(data.matic_chainid), - process.env.API_KEY, - data.paymaster_arka - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_arka_initialize); - } - - // validate the balance of the wallet - try { - let output = await maticDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.matic_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === maticNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_maticUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token on arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await maticMainNetSdk.estimate({ - 
paymasterDetails: { - url: `https://arka.etherspot.io?apiKey=${ - process.env.API_KEY - }&chainId=${Number(data.matic_chainid)}`, - context: { mode: 'sponsor' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer token with arka pimlico paymaster on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = 
`?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - // get balance of the account address - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /** - * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend - * from the paymaster address on behalf of you. - */ - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - let uoHash1; - let transactionBatch; - let op; - let uoHash; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - try { - assert.isNotEmpty( - paymasterAddress, - message.vali_pimlico_paymasterAddress_1 - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - let contract; - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - - contract = await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - - try { - assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(contract.data, message.vali_erc20Contract_data); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get estimation of transaction - try { - approveOp = await maticMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - approveOp.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - uoHash1 = await maticMainNetSdk.send(approveOp); - - try { - assert.isNotEmpty(uoHash1, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - 
const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... 
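The xit-ed pimlico case is long mainly because every UserOp field is asserted individually; the underlying flow is short. A sketch of that flow, assuming sdk is the PrimeSdk instance from the setup sketch above, and with the arka url, entry point, USDC token and recipient filled in from testData.json (the 0x values below are placeholders, not the real addresses).

import { ethers, utils } from 'ethers';
import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js';

const arkaUrl = 'https://arka.etherspot.io';                // data.paymaster_arka
const query = `?apiKey=${process.env.API_KEY}&chainId=137`; // matic chain id
const entryPoint = '0x...';                                 // data.entryPointAddress
const usdc = '0x...';                                       // data.tokenAddress_maticUSDC
const recipient = '0x...';                                  // data.recipient

// 1. Ask arka for the pimlico ERC-20 paymaster address for the chosen gas token.
//    Only needed once per gas token, since the next step approves it as a spender.
const { message: paymasterAddress } = await fetch(`${arkaUrl}/pimlicoAddress${query}`, {
  method: 'POST',
  headers: { Accept: 'application/json', 'Content-Type': 'application/json' },
  body: JSON.stringify({ params: [entryPoint, { token: 'USDC' }] }),
}).then((r) => r.json());

if (utils.isAddress(paymasterAddress)) {
  // 2. Approve the paymaster to pull USDC for gas
  const erc20 = new ethers.Contract(usdc, ERC20_ABI);
  await sdk.addUserOpsToBatch({
    to: usdc,
    data: erc20.interface.encodeFunctionData('approve', [paymasterAddress, ethers.constants.MaxUint256]),
  });
  await sdk.send(await sdk.estimate());

  // 3. The actual transfer, with gas charged in USDC (mode 'erc20')
  await sdk.clearUserOpsFromBatch();
  await sdk.addUserOpsToBatch({ to: recipient, value: ethers.utils.parseEther('0.0001') });
  const op = await sdk.estimate({
    paymasterDetails: { url: `${arkaUrl}${query}`, context: { token: 'USDC', mode: 'erc20' } },
  });
  console.log('userOp hash:', await sdk.send(op));
}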
- try { - uoHash = await maticMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - } else { - addContext(test, message.fail_paymasterAddress_1); - assert.fail(message.fail_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - let transactionBatch; - let op; - let uoHash; - - // get balance of the account address - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee 
data for the UserOp - try { - op = await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... 
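Condensed, the comment block above amounts to: the same sponsor-mode estimate, plus two millisecond timestamps in the context. A sketch, reusing the arkaUrl/query placeholders from the previous sketch; 6,000,000 ms is the 100-minute window the spec's comment describes.

const now = new Date().valueOf();
const op = await sdk.estimate({
  paymasterDetails: {
    url: `${arkaUrl}${query}`,
    context: {
      mode: 'sponsor',           // validUntil/validAfter only apply to sponsored ops, not token paymasters
      validAfter: now,           // not valid before this time
      validUntil: now + 6000000, // expires 100 minutes after the paymaster data is generated
    },
  },
});
console.log('userOp hash:', await sdk.send(op));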
- try { - uoHash = await maticMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the metadata of the arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the metadata - try { - let metadata = await arkaPaymaster.metadata(); - - try { - assert.isNotEmpty( - metadata.sponsorAddress, - message.vali_metadata_sponsorAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.sponsorWalletBalance, - message.vali_metadata_sponsorWalletBalance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.chainsSupported, - message.vali_metadata_chainsSupported - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.tokenPaymasters, - message.vali_metadata_tokenPaymasters - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_metadata_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get token paymaster address function of the arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the get token paymaster address - try { - let getTokenPaymasterAddress = - await arkaPaymaster.getTokenPaymasterAddress('USDC'); - - try { - assert.isNotEmpty( - getTokenPaymasterAddress, - message.vali_getTokenPaymasterAddress_tokenPaymasterAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the remove whitelist address function of the arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the remove whitelist address - try { - let removeWhitelist = await arkaPaymaster.removeWhitelist([ - data.sender, - ]); - - if (removeWhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - 
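The remaining SMOKE cases in this block each poke one ArkaPaymaster helper; taken together they look like the sketch below, with arkaPaymaster as created in the setup sketch, sender standing in for data.sender, and the deposit amount standing in for data.value.

const sender = '0x...'; // data.sender, the counterfactual wallet address

// sponsor metadata: address, balance, supported chains, token paymasters
const meta = await arkaPaymaster.metadata();
console.log(meta.sponsorAddress, meta.sponsorWalletBalance, meta.chainsSupported, meta.tokenPaymasters);

// token paymaster lookup by symbol
console.log(await arkaPaymaster.getTokenPaymasterAddress('USDC'));

// whitelist management; each call returns a status string the specs match against constant.json
console.log(await arkaPaymaster.removeWhitelist([sender]));
console.log(await arkaPaymaster.addWhitelist([sender]));
console.log(await arkaPaymaster.checkWhitelist(sender));

// top up the sponsor wallet with a decimal amount (the spec reuses data.value here)
console.log(await arkaPaymaster.deposit('0.0001'));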
console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the add whitelist address function of the arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the add whitelist address - try { - let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); - - if (addWhitelist.includes(constant.add_whitelist_1)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the check whitelist function of the arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the whilelist address - try { - let checkWhitelist = await arkaPaymaster.checkWhitelist(data.sender); - - if (checkWhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkWhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the deposit function of the arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the deposit - try { - let deposit = await arkaPaymaster.deposit(data.value); - - if (deposit.includes(constant.deposit_1)) { - addContext(test, message.vali_deposit_1); - console.log(message.vali_deposit_1); - } else { - addContext(test, message.fail_deposit_1); - assert.fail(message.fail_deposit_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid arka paymaster url on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: data.invalid_paymaster_arka, // invalid URL - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - if (e.message === constant.not_found) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INVALID_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INCORRECT_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_5); - assert.fail(message.fail_estimateTransaction_5); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_4); - console.log(message.vali_estimateTransaction_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without API Key of arka paymaster on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - // without api_key - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_5); - 
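These URL and API-key regressions all follow the same pattern: batch a plain transfer, then expect estimate() to reject once paymasterDetails points at a bad url or key. Roughly, reusing sdk, recipient and ethers from the sketches above; the url and env variable names are the spec's own, and the exact error strings live in constant.json.

await sdk.clearUserOpsFromBatch();
await sdk.addUserOpsToBatch({ to: recipient, value: ethers.utils.parseEther('0.0001') });

try {
  await sdk.estimate({
    paymasterDetails: {
      url: 'https://arka.etherspot.io',        // or data.invalid_paymaster_arka for the not-found case
      api_key: process.env.INVALID_API_KEY,    // invalid / incorrect / omitted, depending on the case
      context: { mode: 'sponsor' },
    },
  });
  throw new Error('estimate was expected to be rejected');
} catch (e) {
  // compared with constant.not_found or constant.invalid_apiKey in the specs
  console.log('rejected:', e.message);
}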
console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the matic network', async function () { - var test = this; - const invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - if (paymasterAddress.includes(constant.not_found_2)) { - addContext(test, message.vali_pimlico_paymasterAddress_2); - console.log(message.vali_pimlico_paymasterAddress_2); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_2); - assert.fail(message.fail_pimlico_paymasterAddress_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( - data.matic_chainid - )}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_3); - console.log(message.vali_pimlico_paymasterAddress_3); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_3); - assert.fail(message.fail_pimlico_paymasterAddress_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?chainId=${Number(data.matic_chainid)}`; // without API Key in queryString - if (runTest) { - await 
customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_4); - console.log(message.vali_pimlico_paymasterAddress_4); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_4); - assert.fail(message.fail_pimlico_paymasterAddress_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_matic_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_3) { - addContext(test, message.vali_pimlico_paymasterAddress_5); - console.log(message.vali_pimlico_paymasterAddress_5); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_5); - assert.fail(message.fail_pimlico_paymasterAddress_5); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - // without chainid in queryString - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_6); - console.log(message.vali_pimlico_paymasterAddress_6); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_6); 
- assert.fail(message.fail_pimlico_paymasterAddress_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.invalidEntryPointAddress, // invalid entry point address - { token: data.usdc_token }, - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - const errorMessage = returnedValue.error; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_pimlico_paymasterAddress_7); - console.log(message.vali_pimlico_paymasterAddress_7); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_7); - assert.fail(message.fail_pimlico_paymasterAddress_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: data.invalid_usdc_token }, // invalid token - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_1) { - addContext(test, message.vali_pimlico_paymasterAddress_8); - console.log(message.vali_pimlico_paymasterAddress_8); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_8); - assert.fail(message.fail_pimlico_paymasterAddress_8); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 
'application/json', - }, - body: JSON.stringify({ - params: [], // without parametets - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_9); - console.log(message.vali_pimlico_paymasterAddress_9); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_9); - assert.fail(message.fail_pimlico_paymasterAddress_9); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.incorrectTokenAddress_maticUSDC, // incorrect token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_1); - console.log(message.vali_erc20Contract_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); 
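All of the queryString permutations above hit the same endpoint; the only thing that changes is whether the JSON response carries a paymaster address in message or a reason in error, which is what each branch checks. A sketch, with the same arkaUrl and entryPoint placeholders as before; here the apiKey is deliberately left out of the query.

const resp = await fetch(`${arkaUrl}/pimlicoAddress?chainId=137`, {
  method: 'POST',
  headers: { Accept: 'application/json', 'Content-Type': 'application/json' },
  body: JSON.stringify({ params: [entryPoint, { token: 'USDC' }] }),
}).then((r) => r.json());

if (resp.error) {
  // e.g. the invalid-api-key / invalid-chain-id / invalid-data strings from constant.json
  console.log('arka rejected the request:', resp.error);
} else if (utils.isAddress(resp.message)) {
  console.log('pimlico paymaster address:', resp.message);
}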
- - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.invalidTokenAddress_maticUSDC, // invalid token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_2); - console.log(message.vali_erc20Contract_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Contract_3); - console.log(message.vali_erc20Contract_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_5); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = 
`?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Contract_4); - console.log(message.vali_erc20Contract_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await maticMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.invalidValue), - }); - - addContext(test, message.fail_addTransaction_2); - assert.fail(message.fail_addTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_value_1)) { - addContext(test, vali_addTransaction_1); - console.log(vali_addTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await maticMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.matic_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the 
UserOp Hash - try { - approveOp = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await maticMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - let invalid_queryString = `?chainId=${Number(data.matic_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, 
ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await maticMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_matic_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await maticMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 
'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await maticMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the matic network', async function () { - var test = this; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.matic_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await 
maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.matic_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?chainId=${Number(data.matic_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the 
fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_matic_chainid - )}`; // invalid ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - }, 
data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the matic network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await maticMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(data.invalid_usdc_token); - - addContext(test, message.fail_getTokenPaymasterAddress_2); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_1)) { - addContext(test, 
message.vali_getTokenPaymasterAddress_1); - console.log(message.vali_getTokenPaymasterAddress_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(); - - addContext(test, message.fail_getTokenPaymasterAddress_3); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_getTokenPaymasterAddress_2); - console.log(message.vali_getTokenPaymasterAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.invalidSender]); - - addContext(test, message.fail_removeWhitelist_3); - assert.fail(message.fail_removeWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_3); - console.log(message.vali_removeWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.incorrectSender]); - - addContext(test, message.fail_removeWhitelist_4); - assert.fail(message.fail_removeWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_4); - console.log(message.vali_removeWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove 
whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([randomAddress.address]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress.address, - data.sender, - ]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // make whitelisted addresses - await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - // remove whitelist addresses - let removewhitelist = await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (removewhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist 
address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - addContext(test, message.fail_removeWhitelist_6); - assert.fail(message.fail_removeWhitelist_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.invalidSender]); - - addContext(test, message.fail_addWhitelist_3); - assert.fail(message.fail_addWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_3); - console.log(message.vali_addWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.incorrectSender]); - - addContext(test, message.fail_addWhitelist_4); - assert.fail(message.fail_addWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_4); - console.log(message.vali_addWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_5); - console.log(message.vali_addWhitelist_5); - } else { - addContext(test, message.fail_addWhitelist_7); - assert.fail(message.fail_addWhitelist_7); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - 
console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.addWhitelist([ - randomAddress.address, - data.sender, - ]); - - if (addWhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // add whitelist addresses - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_2); - assert.fail(message.fail_checkWhitelist_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_1); - console.log(message.vali_checkWhitelist_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_3); - assert.fail(message.fail_checkWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_2); - console.log(message.vali_checkWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let checkwhitelist = await arkaPaymaster.checkWhitelist( - randomAddress.address - ); - - if (checkwhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkwhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster without address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(); - - addContext(test, message.fail_checkWhitelist_5); - assert.fail(message.fail_checkWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_3); - console.log(message.vali_checkWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the deposit - try { - await arkaPaymaster.deposit('one'); - - addContext(test, message.fail_deposit_3); - assert.fail(message.fail_deposit_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - 
addContext(test, message.vali_deposit_2); - console.log(message.vali_deposit_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/paymaster/newWallet_paymaster.spec.js b/test/specs/mainnet/paymaster/newWallet_paymaster.spec.js new file mode 100644 index 0000000..f7780b4 --- /dev/null +++ b/test/specs/mainnet/paymaster/newWallet_paymaster.spec.js @@ -0,0 +1,4564 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { + PrimeSdk, + DataUtils, + EtherspotBundler, + ArkaPaymaster, +} from '@etherspot/prime-sdk'; +import { ethers, utils } from 'ethers'; +import { assert } from 'chai'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomTokenAddress, + randomTokenName, +} from '../../../utils/sharedData_mainnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let mainnetPrimeSdk; +let nativeAddress = null; +let dataService; +let arkaPaymaster; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Perform the transaction with arka and pimlico paymasters on the MainNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... 
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Perform the transfer native token on arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `https://arka.etherspot.io?apiKey=${ + process.env.API_KEY_ARKA + }&chainId=${Number(randomChainId)}`, + context: { mode: 'sponsor' }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer token with arka pimlico paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY_ARKA}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.longTimeout); + + let balance; 
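A minimal sketch of the sponsored-transfer flow this SMOKE test exercises, assuming the same wallet, bundler and ARKA API key as the before hook; sendSponsoredTransfer is an illustrative helper name, not an SDK API.

import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk';
import { ethers } from 'ethers';

// Illustrative helper: native transfer whose gas is sponsored by Arka (mode: 'sponsor').
async function sendSponsoredTransfer(privateKey, chainId, recipient, value) {
  const sdk = new PrimeSdk(
    { privateKey },
    {
      chainId: Number(chainId),
      bundlerProvider: new EtherspotBundler(
        Number(chainId),
        process.env.BUNDLER_API_KEY
      ),
    }
  );

  await sdk.clearUserOpsFromBatch(); // start from an empty batch
  await sdk.addUserOpsToBatch({
    to: recipient,
    value: ethers.utils.parseEther(value),
  });

  // mode: 'sponsor' asks the Arka paymaster to pay the gas for this UserOp
  const op = await sdk.estimate({
    paymasterDetails: {
      url: `https://arka.etherspot.io?apiKey=${process.env.API_KEY_ARKA}&chainId=${Number(chainId)}`,
      context: { mode: 'sponsor' },
    },
  });

  return sdk.send(op); // resolves to the UserOp hash submitted to the bundler
}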
+ // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /** + * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend + * from the paymaster address on behalf of you. + */ + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + let uoHash1; + let transactionBatch; + let op; + let uoHash; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + try { + assert.isNotEmpty( + paymasterAddress, + message.vali_pimlico_paymasterAddress_1 + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + let contract; + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + + contract = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + + try { + assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + contract.data, + message.vali_erc20Contract_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get estimation of transaction + try { + approveOp = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + approveOp.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
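A compact sketch of the token-paymaster flow the surrounding SMOKE test walks through, assuming an initialized PrimeSdk instance and a paymaster address already fetched from Arka's pimlicoAddress endpoint; approveAndPayGasInToken is an illustrative helper name.

import { ethers, utils } from 'ethers';
import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js';

// Illustrative helper: one-time ERC-20 approval for the token paymaster,
// then a transfer whose gas is paid in that token (mode: 'erc20').
async function approveAndPayGasInToken(sdk, opts) {
  const { arkaUrl, queryString, tokenAddress, tokenName, paymasterAddress, recipient, value } = opts;
  if (!utils.isAddress(paymasterAddress)) {
    throw new Error('token paymaster address was not resolved');
  }

  // 1. Approve the paymaster to pull the gas token (needed once per token).
  const erc20 = new ethers.Contract(tokenAddress, ERC20_ABI);
  const approveData = erc20.interface.encodeFunctionData('approve', [
    paymasterAddress,
    ethers.constants.MaxUint256,
  ]);
  await sdk.addUserOpsToBatch({ to: tokenAddress, data: approveData });
  const approveOp = await sdk.estimate();
  await sdk.send(approveOp);
  // (the spec polls for this approval receipt before continuing)

  // 2. Send the actual transfer, paying gas in the ERC-20 token.
  await sdk.clearUserOpsFromBatch();
  await sdk.addUserOpsToBatch({
    to: recipient,
    value: ethers.utils.parseEther(value),
  });
  const op = await sdk.estimate({
    paymasterDetails: {
      url: `${arkaUrl}${queryString}`,
      context: { token: tokenName, mode: 'erc20' },
    },
  });
  return sdk.send(op);
}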
addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + uoHash1 = await mainnetPrimeSdk.send(approveOp); + + try { + assert.isNotEmpty( + uoHash1, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... 
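The receipt-polling loop that follows can be read as a standalone helper, sketched here with an async sleep in place of helper.wait; waitForUserOpReceipt is an illustrative name, not an SDK method.

// Poll the bundler until the UserOp receipt appears or the timeout elapses
// (returns null on timeout, mirroring the loop in the spec).
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function waitForUserOpReceipt(sdk, uoHash, timeoutMs = 60000, pollMs = 5000) {
  const deadline = Date.now() + timeoutMs;
  let receipt = null;
  while (receipt == null && Date.now() < deadline) {
    await sleep(pollMs);
    receipt = await sdk.getUserOpReceipt(uoHash); // same call the spec uses
  }
  return receipt;
}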
+ console.log('Waiting for transaction...'); + let userOpsReceipt1 = null; + const timeout1 = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt1 == null && Date.now() < timeout1) { + helper.wait(data.mediumTimeout); + userOpsReceipt1 = await mainnetPrimeSdk.getUserOpReceipt(uoHash1); + } + + // wait for the execution + helper.wait(data.longTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty( + uoHash, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + } else { + addContext(test, message.fail_paymasterAddress_1); + assert.fail(message.fail_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let balance; + let transactionBatch; + let op; + let uoHash; + + // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + 
const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the metadata of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the metadata + try { + let metadata = await arkaPaymaster.metadata(); + + try { + assert.isNotEmpty( + metadata.sponsorAddress, + message.vali_metadata_sponsorAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.sponsorWalletBalance, + message.vali_metadata_sponsorWalletBalance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.chainsSupported, + message.vali_metadata_chainsSupported + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.tokenPaymasters, + message.vali_metadata_tokenPaymasters + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the get token paymaster address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the get token paymaster address + try { + let getTokenPaymasterAddress = + await arkaPaymaster.getTokenPaymasterAddress('USDC'); + + try { + assert.isNotEmpty( + getTokenPaymasterAddress, + message.vali_getTokenPaymasterAddress_tokenPaymasterAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the remove whitelist address 
function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the remove whitelist address + try { + let removeWhitelist = await arkaPaymaster.removeWhitelist([ + data.sender, + ]); + + if (removeWhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the add whitelist address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the add whitelist address + try { + let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); + + if (addWhitelist.includes(constant.add_whitelist_1)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the check whitelist function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the whilelist address + try { + let checkWhitelist = await arkaPaymaster.checkWhitelist( + data.sender + ); + + if (checkWhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkWhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the deposit function of the 
arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the deposit + try { + let deposit = await arkaPaymaster.deposit(data.value); + + if (deposit.includes(constant.deposit_1)) { + addContext(test, message.vali_deposit_1); + console.log(message.vali_deposit_1); + } else { + addContext(test, message.fail_deposit_1); + assert.fail(message.fail_deposit_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid arka paymaster url on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.invalid_paymaster_arka, // invalid URL + api_key: process.env.API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + if (e.message === constant.not_found) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INVALID_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INCORRECT_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_5); + assert.fail(message.fail_estimateTransaction_5); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_4); + console.log(message.vali_estimateTransaction_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + // without api_key + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + const invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + if (paymasterAddress.includes(constant.not_found_2)) { + addContext(test, message.vali_pimlico_paymasterAddress_2); + console.log(message.vali_pimlico_paymasterAddress_2); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_2); + assert.fail(message.fail_pimlico_paymasterAddress_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( + randomChainId + )}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let 
returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_3); + console.log(message.vali_pimlico_paymasterAddress_3); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_3); + assert.fail(message.fail_pimlico_paymasterAddress_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_4); + console.log(message.vali_pimlico_paymasterAddress_4); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_4); + assert.fail(message.fail_pimlico_paymasterAddress_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_3) { + addContext(test, 
message.vali_pimlico_paymasterAddress_5); + console.log(message.vali_pimlico_paymasterAddress_5); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_5); + assert.fail(message.fail_pimlico_paymasterAddress_5); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + // without chainid in queryString + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_data) { + addContext(test, message.vali_pimlico_paymasterAddress_6); + console.log(message.vali_pimlico_paymasterAddress_6); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_6); + assert.fail(message.fail_pimlico_paymasterAddress_6); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [ + data.invalidEntryPointAddress, // invalid entry point address + { token: randomTokenName }, + ], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + const errorMessage = returnedValue.error; + if (errorMessage.includes(constant.invalid_address_4)) { + addContext(test, message.vali_pimlico_paymasterAddress_7); + console.log(message.vali_pimlico_paymasterAddress_7); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_7); + assert.fail(message.fail_pimlico_paymasterAddress_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; 
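The REGRESSION tests in this block all vary the same Arka pimlicoAddress request; a minimal sketch of that call and of the two response shapes the assertions rely on (message on success, error on bad input). fetchPimlicoPaymasterAddress is an illustrative helper name.

// Illustrative helper: resolve the token paymaster address from Arka.
async function fetchPimlicoPaymasterAddress(arkaUrl, apiKey, chainId, entryPointAddress, tokenName) {
  const queryString = `?apiKey=${apiKey}&chainId=${Number(chainId)}`;
  const response = await fetch(`${arkaUrl}/pimlicoAddress${queryString}`, {
    method: 'POST',
    headers: { Accept: 'application/json', 'Content-Type': 'application/json' },
    body: JSON.stringify({ params: [entryPointAddress, { token: tokenName }] }),
  });
  const body = await response.json();
  if (body.error) throw new Error(body.error); // e.g. invalid/missing apiKey or chainId
  return body.message; // the paymaster address on success
}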
+ let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [ + data.entryPointAddress, + { token: data.invalid_usdc_token }, // invalid token + ], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_1) { + addContext(test, message.vali_pimlico_paymasterAddress_8); + console.log(message.vali_pimlico_paymasterAddress_8); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_8); + assert.fail(message.fail_pimlico_paymasterAddress_8); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [], // without parametets + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_data) { + addContext(test, message.vali_pimlico_paymasterAddress_9); + console.log(message.vali_pimlico_paymasterAddress_9); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_9); + assert.fail(message.fail_pimlico_paymasterAddress_9); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + 
} catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_1); + console.log(message.vali_erc20Contract_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_3); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_2); + console.log(message.vali_erc20Contract_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var 
test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Contract_3); + console.log(message.vali_erc20Contract_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_5); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Contract_4); + 
console.log(message.vali_erc20Contract_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_6); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.invalidValue), + }); + + addContext(test, message.fail_addTransaction_2); + assert.fail(message.fail_addTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_value_1)) { + addContext(test, message.vali_addTransaction_1); + console.log(message.vali_addTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_3); + } + } + }
else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage 
= e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { 
+ await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token 
paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without 
ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress( + data.invalid_usdc_token + ); + + addContext(test, message.fail_getTokenPaymasterAddress_2); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_1)) { + addContext(test, message.vali_getTokenPaymasterAddress_1); + console.log(message.vali_getTokenPaymasterAddress_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + 
addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress(); + + addContext(test, message.fail_getTokenPaymasterAddress_3); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_getTokenPaymasterAddress_2); + console.log(message.vali_getTokenPaymasterAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.invalidSender]); + + addContext(test, message.fail_removeWhitelist_3); + assert.fail(message.fail_removeWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_3); + console.log(message.vali_removeWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.incorrectSender]); + + addContext(test, message.fail_removeWhitelist_4); + assert.fail(message.fail_removeWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_4); + console.log(message.vali_removeWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await 
customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([randomAddress.address]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress.address, + data.sender, + ]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // make whitelisted addresses + await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + // remove whitelist addresses + let removewhitelist = await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (removewhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + 
console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + addContext(test, message.fail_removeWhitelist_6); + assert.fail(message.fail_removeWhitelist_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.invalidSender]); + + addContext(test, message.fail_addWhitelist_3); + assert.fail(message.fail_addWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_3); + console.log(message.vali_addWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.incorrectSender]); + + addContext(test, message.fail_addWhitelist_4); + assert.fail(message.fail_addWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_4); + console.log(message.vali_addWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if 
(runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_5); + console.log(message.vali_addWhitelist_5); + } else { + addContext(test, message.fail_addWhitelist_7); + assert.fail(message.fail_addWhitelist_7); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress.address, + data.sender, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // add whitelist addresses + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // 
Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + await arkaPaymaster.checkWhitelist(data.invalidSender); + + addContext(test, message.fail_checkWhitelist_2); + assert.fail(message.fail_checkWhitelist_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_1); + console.log(message.vali_checkWhitelist_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + await arkaPaymaster.checkWhitelist(data.incorrectSender); + + addContext(test, message.fail_checkWhitelist_3); + assert.fail(message.fail_checkWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_2); + console.log(message.vali_checkWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let checkwhitelist = await arkaPaymaster.checkWhitelist( + randomAddress.address + ); + + if (checkwhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkwhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster without address on the ' + + randomChainName + + ' network', + async function () { + 
var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + await arkaPaymaster.checkWhitelist(); + + addContext(test, message.fail_checkWhitelist_5); + assert.fail(message.fail_checkWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_3); + console.log(message.vali_checkWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the deposit + try { + await arkaPaymaster.deposit('one'); + + addContext(test, message.fail_deposit_3); + assert.fail(message.fail_deposit_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_2); + console.log(message.vali_deposit_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/paymaster/newWallet_xdai.spec.js b/test/specs/mainnet/paymaster/newWallet_xdai.spec.js deleted file mode 100644 index f72d010..0000000 --- a/test/specs/mainnet/paymaster/newWallet_xdai.spec.js +++ /dev/null @@ -1,4267 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { - PrimeSdk, - DataUtils, - EtherspotBundler, - ArkaPaymaster, -} from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import testUtils from '../../../utils/testUtils.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; -import { dirname } from 'path'; -import { fileURLToPath } from 'url'; -import fs from 'fs'; -import path from 'path'; - -let xdaiMainNetSdk; -let xdaiNativeAddress = null; -let xdaiDataService; -let arkaPaymaster; -let runTest; -const __dirname = dirname(fileURLToPath(import.meta.url)); - -describe('The PrimeSDK, when transaction with arka and pimlico paymasters with xdai network on the MainNet (with new wallet)', function () { - before(async function () { - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { 
- xdaiMainNetSdk = new PrimeSdk( - { privateKey: sharedState.newPrivateKey }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // initializating Data service... - try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - }, data.retry); // Retry this async test up to 5 times - }); - - beforeEach(async function () { - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }); - - it('SMOKE: Perform the transfer native token on arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate 
transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `https://arka.etherspot.io?apiKey=${ - process.env.API_KEY - }&chainId=${Number(data.xdai_chainid)}`, - context: { mode: 'sponsor' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer 
token with arka pimlico paymaster on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // wait for the execution - helper.wait(data.longTimeout); - - let balance; - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /** - * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend - * from the paymaster address on behalf of you. - */ - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - let uoHash1; - let transactionBatch; - let op; - let uoHash; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - try { - assert.isNotEmpty( - paymasterAddress, - message.vali_pimlico_paymasterAddress_1 - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - let contract; - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - - contract = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - - try { - assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(contract.data, message.vali_erc20Contract_data); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get estimation of transaction - try { - approveOp = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - approveOp.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - 
assert.isNotEmpty( - approveOp.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - uoHash1 = await xdaiMainNetSdk.send(approveOp); - - try { - assert.isNotEmpty(uoHash1, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash... 
- console.log('Waiting for transaction...'); - let userOpsReceipt1 = null; - const timeout1 = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt1 == null && Date.now() < timeout1) { - helper.wait(data.mediumTimeout); - userOpsReceipt1 = await xdaiMainNetSdk.getUserOpReceipt(uoHash1); - } - console.log('Transaction Receipt: ', userOpsReceipt1); - - // wait for the execution - helper.wait(data.longTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - 
message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - } else { - addContext(test, message.fail_paymasterAddress_1); - assert.fail(message.fail_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the XDAI network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let balance; - let transactionBatch; - let op; - let uoHash; - - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } 
catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } 
catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the metadata of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the metadata - try { - let metadata = await arkaPaymaster.metadata(); - - try { - assert.isNotEmpty( - metadata.sponsorAddress, - message.vali_metadata_sponsorAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.sponsorWalletBalance, - message.vali_metadata_sponsorWalletBalance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.chainsSupported, - message.vali_metadata_chainsSupported - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.tokenPaymasters, - message.vali_metadata_tokenPaymasters - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_metadata_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the get token paymaster address function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the get token paymaster address - try { - let getTokenPaymasterAddress = - await arkaPaymaster.getTokenPaymasterAddress('USDC'); - - try { - assert.isNotEmpty( - getTokenPaymasterAddress, - message.vali_getTokenPaymasterAddress_tokenPaymasterAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the remove whitelist address function of the arka paymaster on the xdai network', async 
function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the remove whitelist address - try { - let removeWhitelist = await arkaPaymaster.removeWhitelist([ - data.sender, - ]); - - if (removeWhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the add whitelist address function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the add whitelist address - try { - let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); - - if (addWhitelist.includes(constant.add_whitelist_1)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the check whitelist function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the whilelist address - try { - let checkWhitelist = await arkaPaymaster.checkWhitelist(data.sender); - - if (checkWhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkWhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the deposit function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - 
helper.wait(data.mediumTimeout); - - // validate the deposit - try { - let deposit = await arkaPaymaster.deposit(data.value); - - if (deposit.includes(constant.deposit_1)) { - addContext(test, message.vali_deposit_1); - console.log(message.vali_deposit_1); - } else { - addContext(test, message.fail_deposit_1); - assert.fail(message.fail_deposit_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid arka paymaster url on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.invalid_paymaster_arka, // invalid URL - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - if (e.message === constant.not_found) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INVALID_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INCORRECT_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_5); - assert.fail(message.fail_estimateTransaction_5); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_4); - console.log(message.vali_estimateTransaction_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without API Key of arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - // without api_key - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the xdai network', async function () { - var test = this; - const invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - if (paymasterAddress.includes(constant.not_found_2)) { - addContext(test, message.vali_pimlico_paymasterAddress_2); - console.log(message.vali_pimlico_paymasterAddress_2); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_2); - assert.fail(message.fail_pimlico_paymasterAddress_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_3); - console.log(message.vali_pimlico_paymasterAddress_3); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_3); - assert.fail(message.fail_pimlico_paymasterAddress_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?chainId=${Number(data.xdai_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_4); - console.log(message.vali_pimlico_paymasterAddress_4); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_4); - assert.fail(message.fail_pimlico_paymasterAddress_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_xdai_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_3) { - addContext(test, message.vali_pimlico_paymasterAddress_5); - console.log(message.vali_pimlico_paymasterAddress_5); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_5); - assert.fail(message.fail_pimlico_paymasterAddress_5); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - 
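// A minimal sketch of the request pattern the surrounding negative tests exercise, assuming the
// Arka `/pimlicoAddress` endpoint and the `message` / `error` response fields used above.
// The helper name and its parameters are illustrative only and are not part of the test suite.
async function fetchPimlicoPaymasterAddress(arkaUrl, apiKey, chainId, entryPoint, token) {
  const queryString = `?apiKey=${apiKey}&chainId=${Number(chainId)}`;
  const response = await fetch(`${arkaUrl}/pimlicoAddress${queryString}`, {
    method: 'POST',
    headers: { Accept: 'application/json', 'Content-Type': 'application/json' },
    body: JSON.stringify({ params: [entryPoint, { token }] }),
  }).then((res) => res.json());
  // A valid request returns the paymaster address in `message`; a missing or invalid
  // apiKey, chainId, or params array surfaces in `error` instead, which is what the
  // REGRESSION cases above and below assert on.
  if (response.error) throw new Error(response.error);
  return response.message;
}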
it('REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - // without chainid in queryString - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_6); - console.log(message.vali_pimlico_paymasterAddress_6); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_6); - assert.fail(message.fail_pimlico_paymasterAddress_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.invalidEntryPointAddress, // invalid entry point address - { token: data.usdc_token }, - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - const errorMessage = returnedValue.error; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_pimlico_paymasterAddress_7); - console.log(message.vali_pimlico_paymasterAddress_7); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_7); - assert.fail(message.fail_pimlico_paymasterAddress_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: data.invalid_usdc_token }, // invalid token - ], - }), - } 
- ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_1) { - addContext(test, message.vali_pimlico_paymasterAddress_8); - console.log(message.vali_pimlico_paymasterAddress_8); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_8); - assert.fail(message.fail_pimlico_paymasterAddress_8); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [], // without parametets - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_9); - console.log(message.vali_pimlico_paymasterAddress_9); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_9); - assert.fail(message.fail_pimlico_paymasterAddress_9); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.incorrectTokenAddress_xdaiUSDC, // incorrect token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } 
catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_1); - console.log(message.vali_erc20Contract_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.invalidTokenAddress_xdaiUSDC, // invalid token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_2); - console.log(message.vali_erc20Contract_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } 
catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Contract_3); - console.log(message.vali_erc20Contract_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_5); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Contract_4); - console.log(message.vali_erc20Contract_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on 
the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.invalidValue), - }); - - addContext(test, message.fail_addTransaction_2); - assert.fail(message.fail_addTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_value_1)) { - addContext(test, vali_addTransaction_1); - console.log(vali_addTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = 
`?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - 
}); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.xdai_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?chainId=${Number(data.xdai_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, 
message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_xdai_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await 
xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the XDAI network', async function () { - var test = this; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await 
xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.xdai_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - 
} - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?chainId=${Number(data.xdai_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_xdai_chainid - )}`; // invalid ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await 
xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(data.invalid_usdc_token); - - addContext(test, message.fail_getTokenPaymasterAddress_2); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_1)) { - addContext(test, message.vali_getTokenPaymasterAddress_1); - console.log(message.vali_getTokenPaymasterAddress_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(); - - addContext(test, message.fail_getTokenPaymasterAddress_3); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_getTokenPaymasterAddress_2); - console.log(message.vali_getTokenPaymasterAddress_2); - } else { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.invalidSender]); - - addContext(test, message.fail_removeWhitelist_3); - assert.fail(message.fail_removeWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_3); - console.log(message.vali_removeWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.incorrectSender]); - - addContext(test, message.fail_removeWhitelist_4); - assert.fail(message.fail_removeWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_4); - console.log(message.vali_removeWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([randomAddress.address]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and 
whitelisted addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress.address, - data.sender, - ]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // make whitelisted addresses - await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - // remove whitelist addresses - let removewhitelist = await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (removewhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - addContext(test, message.fail_removeWhitelist_6); - assert.fail(message.fail_removeWhitelist_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test 
up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.invalidSender]); - - addContext(test, message.fail_addWhitelist_3); - assert.fail(message.fail_addWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_3); - console.log(message.vali_addWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.incorrectSender]); - - addContext(test, message.fail_addWhitelist_4); - assert.fail(message.fail_addWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_4); - console.log(message.vali_addWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_5); - console.log(message.vali_addWhitelist_5); - } else { - addContext(test, message.fail_addWhitelist_7); - assert.fail(message.fail_addWhitelist_7); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the xdai network', 
async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.addWhitelist([ - randomAddress.address, - data.sender, - ]); - - if (addWhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // add whitelist addresses - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_2); - assert.fail(message.fail_checkWhitelist_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_1); - console.log(message.vali_checkWhitelist_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with 
incorrect address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_3); - assert.fail(message.fail_checkWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_2); - console.log(message.vali_checkWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let checkwhitelist = await arkaPaymaster.checkWhitelist( - randomAddress.address - ); - - if (checkwhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkwhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster without address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(); - - addContext(test, message.fail_checkWhitelist_5); - assert.fail(message.fail_checkWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_3); - console.log(message.vali_checkWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the deposit - try { - await arkaPaymaster.deposit('one'); - - addContext(test, message.fail_deposit_3); - assert.fail(message.fail_deposit_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_deposit_2); - console.log(message.vali_deposit_2); - } else { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/paymaster/oldWallet_paymaster.spec.js b/test/specs/mainnet/paymaster/oldWallet_paymaster.spec.js new file mode 100644 index 0000000..23aa27d --- /dev/null +++ b/test/specs/mainnet/paymaster/oldWallet_paymaster.spec.js @@ -0,0 +1,4596 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { + PrimeSdk, + DataUtils, + EtherspotBundler, + ArkaPaymaster, +} from '@etherspot/prime-sdk'; +import { ethers, utils } from 'ethers'; +import { assert } from 'chai'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomTokenAddress, + randomTokenName, +} from '../../../utils/sharedData_mainnet.js'; + +let mainnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let arkaPaymaster; +let runTest; + +describe('Perform the transaction with arka and pimlico paymasters on the MainNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializing sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + mainnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await mainnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializing Data service...
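+ // The data service is used in the beforeEach hook below to read the wallet's native and USDC balances via getAccountBalances and decide (runTest) whether each spec should run or be skipped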
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Perform the transfer native token on arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `https://arka.etherspot.io?apiKey=${ + process.env.API_KEY_ARKA + }&chainId=${Number(randomChainId)}`, + context: { mode: 'sponsor' }, + }, 
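+ // mode: 'sponsor' asks the Arka paymaster to sponsor the gas for this UserOp; the
+ // pimlico ERC-20 test further down passes mode: 'erc20' with a token symbol so that
+ // gas is settled in that token instead (assumed semantics of the two Arka modes).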
+ }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer token with arka pimlico paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY_ARKA}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await 
customRetryAsync(async function () { + // wait for the execution + helper.wait(data.longTimeout); + + let balance; + // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /** + * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend + * from the paymaster address on behalf of you. + */ + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + let uoHash1; + let transactionBatch; + let op; + let uoHash; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + try { + assert.isNotEmpty( + paymasterAddress, + message.vali_pimlico_paymasterAddress_1 + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + let contract; + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + + contract = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + + try { + assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + contract.data, + message.vali_erc20Contract_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get estimation of transaction + try { + approveOp = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + approveOp.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callGasLimit, + 
message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + uoHash1 = await mainnetPrimeSdk.send(approveOp); + + try { + assert.isNotEmpty( + uoHash1, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... 
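+ // getUserOpReceipt returns null until the bundler has included the approval UserOp,
+ // so the loop below polls for the receipt for up to one minute (timeout1) before
+ // continuing with the sponsored transfer.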
+ console.log('Waiting for transaction...'); + let userOpsReceipt1 = null; + const timeout1 = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt1 == null && Date.now() < timeout1) { + helper.wait(data.mediumTimeout); + userOpsReceipt1 = await mainnetPrimeSdk.getUserOpReceipt(uoHash1); + } + + // wait for the execution + helper.wait(data.longTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty( + uoHash, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + } else { + addContext(test, message.fail_paymasterAddress_1); + assert.fail(message.fail_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let balance; + let transactionBatch; + let op; + let uoHash; + + // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + 
const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the metadata of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the metadata + try { + let metadata = await arkaPaymaster.metadata(); + + try { + assert.isNotEmpty( + metadata.sponsorAddress, + message.vali_metadata_sponsorAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.sponsorWalletBalance, + message.vali_metadata_sponsorWalletBalance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.chainsSupported, + message.vali_metadata_chainsSupported + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.tokenPaymasters, + message.vali_metadata_tokenPaymasters + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the get token paymaster address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the get token paymaster address + try { + let getTokenPaymasterAddress = + await arkaPaymaster.getTokenPaymasterAddress('USDC'); + + try { + assert.isNotEmpty( + getTokenPaymasterAddress, + message.vali_getTokenPaymasterAddress_tokenPaymasterAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the remove whitelist address 
function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the remove whitelist address + try { + let removeWhitelist = await arkaPaymaster.removeWhitelist([ + data.sender, + ]); + + if (removeWhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the add whitelist address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the add whitelist address + try { + let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); + + if (addWhitelist.includes(constant.add_whitelist_1)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the check whitelist function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the whilelist address + try { + let checkWhitelist = await arkaPaymaster.checkWhitelist( + data.sender + ); + + if (checkWhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkWhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the deposit function of the 
arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the deposit + try { + let deposit = await arkaPaymaster.deposit(data.value); + + if (deposit.includes(constant.deposit_1)) { + addContext(test, message.vali_deposit_1); + console.log(message.vali_deposit_1); + } else { + addContext(test, message.fail_deposit_1); + assert.fail(message.fail_deposit_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid arka paymaster url on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.invalid_paymaster_arka, // invalid URL + api_key: process.env.API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + if (e.message === constant.not_found) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INVALID_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INCORRECT_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_5); + assert.fail(message.fail_estimateTransaction_5); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_4); + console.log(message.vali_estimateTransaction_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + // without api_key + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + const invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + if (paymasterAddress.includes(constant.not_found_2)) { + addContext(test, message.vali_pimlico_paymasterAddress_2); + console.log(message.vali_pimlico_paymasterAddress_2); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_2); + assert.fail(message.fail_pimlico_paymasterAddress_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( + randomChainId + )}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let 
returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_3); + console.log(message.vali_pimlico_paymasterAddress_3); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_3); + assert.fail(message.fail_pimlico_paymasterAddress_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_4); + console.log(message.vali_pimlico_paymasterAddress_4); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_4); + assert.fail(message.fail_pimlico_paymasterAddress_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_3) { + addContext(test, 
message.vali_pimlico_paymasterAddress_5); + console.log(message.vali_pimlico_paymasterAddress_5); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_5); + assert.fail(message.fail_pimlico_paymasterAddress_5); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + // without chainid in queryString + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_data) { + addContext(test, message.vali_pimlico_paymasterAddress_6); + console.log(message.vali_pimlico_paymasterAddress_6); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_6); + assert.fail(message.fail_pimlico_paymasterAddress_6); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [ + data.invalidEntryPointAddress, // invalid entry point address + { token: randomTokenName }, + ], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + const errorMessage = returnedValue.error; + if (errorMessage.includes(constant.invalid_address_4)) { + addContext(test, message.vali_pimlico_paymasterAddress_7); + console.log(message.vali_pimlico_paymasterAddress_7); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_7); + assert.fail(message.fail_pimlico_paymasterAddress_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; 
+ let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [ + data.entryPointAddress, + { token: data.invalid_usdc_token }, // invalid token + ], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_1) { + addContext(test, message.vali_pimlico_paymasterAddress_8); + console.log(message.vali_pimlico_paymasterAddress_8); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_8); + assert.fail(message.fail_pimlico_paymasterAddress_8); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [], // without parametets + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_data) { + addContext(test, message.vali_pimlico_paymasterAddress_9); + console.log(message.vali_pimlico_paymasterAddress_9); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_9); + assert.fail(message.fail_pimlico_paymasterAddress_9); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + 
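+ // returnedValue.message carries the ERC-20 paymaster address returned by the Arka
+ // pimlicoAddress endpoint; the utils.isAddress() check below guards against an error
+ // payload before the approve calldata is encoded for the intentionally incorrect token.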
} catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + const erc20Contract = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI + ); + const encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_1); + console.log(message.vali_erc20Contract_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_3); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + const erc20Contract = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI + ); + const encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_2); + console.log(message.vali_erc20Contract_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var 
test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + const encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Contract_3); + console.log(message.vali_erc20Contract_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_5); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + const encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Contract_4); + 
console.log(message.vali_erc20Contract_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_6); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.invalidValue), + }); + + addContext(test, message.fail_addTransaction_2); + assert.fail(message.fail_addTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_value_1)) { + addContext(test, message.vali_addTransaction_1); + console.log(message.vali_addTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_3); + } + } + }
else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage 
= e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await mainnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { 
+ await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token 
paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without 
ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await mainnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress( + data.invalid_usdc_token + ); + + addContext(test, message.fail_getTokenPaymasterAddress_2); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_1)) { + addContext(test, message.vali_getTokenPaymasterAddress_1); + console.log(message.vali_getTokenPaymasterAddress_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + 
addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress(); + + addContext(test, message.fail_getTokenPaymasterAddress_3); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_getTokenPaymasterAddress_2); + console.log(message.vali_getTokenPaymasterAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.invalidSender]); + + addContext(test, message.fail_removeWhitelist_3); + assert.fail(message.fail_removeWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_3); + console.log(message.vali_removeWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.incorrectSender]); + + addContext(test, message.fail_removeWhitelist_4); + assert.fail(message.fail_removeWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_4); + console.log(message.vali_removeWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await 
customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([randomAddress.address]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress.address, + data.sender, + ]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // make whitelisted addresses + await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + // remove whitelist addresses + let removewhitelist = await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (removewhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + 
console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + addContext(test, message.fail_removeWhitelist_6); + assert.fail(message.fail_removeWhitelist_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.invalidSender]); + + addContext(test, message.fail_addWhitelist_3); + assert.fail(message.fail_addWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_3); + console.log(message.vali_addWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.incorrectSender]); + + addContext(test, message.fail_addWhitelist_4); + assert.fail(message.fail_addWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_4); + console.log(message.vali_addWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if 
(runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_5); + console.log(message.vali_addWhitelist_5); + } else { + addContext(test, message.fail_addWhitelist_7); + assert.fail(message.fail_addWhitelist_7); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress.address, + data.sender, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // add whitelist addresses + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); //
Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + await arkaPaymaster.checkWhitelist(data.invalidSender); + + addContext(test, message.fail_checkWhitelist_2); + assert.fail(message.fail_checkWhitelist_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_1); + console.log(message.vali_checkWhitelist_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + await arkaPaymaster.checkWhitelist(data.incorrectSender); + + addContext(test, message.fail_checkWhitelist_3); + assert.fail(message.fail_checkWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_2); + console.log(message.vali_checkWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let checkwhitelist = await arkaPaymaster.checkWhitelist( + randomAddress.address + ); + + if (checkwhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkwhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster without address on the ' + + randomChainName + + ' network', + async function () { +
var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whilelist address + try { + await arkaPaymaster.checkWhitelist(); + + addContext(test, message.fail_checkWhitelist_5); + assert.fail(message.fail_checkWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_3); + console.log(message.vali_checkWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the deposit + try { + await arkaPaymaster.deposit('one'); + + addContext(test, message.fail_deposit_3); + assert.fail(message.fail_deposit_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_2); + console.log(message.vali_deposit_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/paymaster/optimism.spec.js b/test/specs/mainnet/paymaster/optimism.spec.js deleted file mode 100644 index 82e2b24..0000000 --- a/test/specs/mainnet/paymaster/optimism.spec.js +++ /dev/null @@ -1,4240 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { - PrimeSdk, - DataUtils, - EtherspotBundler, - ArkaPaymaster, -} from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let optimismMainNetSdk; -let optimismEtherspotWalletAddress; -let optimismNativeAddress = null; -let optimismDataService; -let arkaPaymaster; -let runTest; - -/* eslint-disable prettier/prettier */ -describe('The PrimeSDK, when transaction with arka and pimlico paymasters with optimism network on the MainNet.', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - optimismMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.optimism_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.optimism_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - optimismMainNetSdk.state.EOAAddress, - 
data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - optimismEtherspotWalletAddress = - await optimismMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - optimismEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - optimismDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // initializating ArkaPaymaster... - try { - arkaPaymaster = new ArkaPaymaster( - Number(data.optimism_chainid), - process.env.API_KEY, - data.paymaster_arka - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_arka_initialize); - } - - // validate the balance of the wallet - try { - let output = await optimismDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === optimismNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_optimismUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token on arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - 
message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `https://arka.etherspot.io?apiKey=${ - process.env.API_KEY - }&chainId=${Number(data.optimism_chainid)}`, - context: { mode: 'sponsor' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await 
optimismMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer token with arka pimlico paymaster on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - // get balance of the account address - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /** - * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend - * from the paymaster address on behalf of you. - */ - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - let uoHash1; - let transactionBatch; - let op; - let uoHash; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - try { - assert.isNotEmpty( - paymasterAddress, - message.vali_pimlico_paymasterAddress_1 - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - let contract; - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - - contract = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - - try { - assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(contract.data, message.vali_erc20Contract_data); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); 
- } - - // get estimation of transaction - try { - approveOp = await optimismMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - approveOp.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - uoHash1 = await optimismMainNetSdk.send(approveOp); - - try { - assert.isNotEmpty(uoHash1, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: 
ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await optimismMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - } else { - addContext(test, message.fail_paymasterAddress_1); - assert.fail(message.fail_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - let transactionBatch; - let op; - let uoHash; - - // get balance of the account address - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data 
is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... 
- try { - uoHash = await optimismMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the metadata of the arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the metadata - try { - let metadata = await arkaPaymaster.metadata(); - - try { - assert.isNotEmpty( - metadata.sponsorAddress, - message.vali_metadata_sponsorAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.sponsorWalletBalance, - message.vali_metadata_sponsorWalletBalance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.chainsSupported, - message.vali_metadata_chainsSupported - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.tokenPaymasters, - message.vali_metadata_tokenPaymasters - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_metadata_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get token paymaster address function of the arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the get token paymaster address - try { - let getTokenPaymasterAddress = - await arkaPaymaster.getTokenPaymasterAddress('USDC'); - - try { - assert.isNotEmpty( - getTokenPaymasterAddress, - message.vali_getTokenPaymasterAddress_tokenPaymasterAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the remove whitelist address function of the arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the remove whitelist address - try { - let removeWhitelist = await arkaPaymaster.removeWhitelist([ - data.sender, - ]); - - if (removeWhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - 
console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the add whitelist address function of the arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the add whitelist address - try { - let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); - - if (addWhitelist.includes(constant.add_whitelist_1)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the check whitelist function of the arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the whilelist address - try { - let checkWhitelist = await arkaPaymaster.checkWhitelist(data.sender); - - if (checkWhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkWhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the deposit function of the arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the deposit - try { - let deposit = await arkaPaymaster.deposit(data.value); - - if (deposit.includes(constant.deposit_1)) { - addContext(test, message.vali_deposit_1); - console.log(message.vali_deposit_1); - } else { - addContext(test, message.fail_deposit_1); - assert.fail(message.fail_deposit_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid arka paymaster url on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: data.invalid_paymaster_arka, // invalid URL - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - if (e.message === constant.not_found) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INVALID_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INCORRECT_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_5); - assert.fail(message.fail_estimateTransaction_5); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_4); - console.log(message.vali_estimateTransaction_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without API Key of arka paymaster on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - // without api_key - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - 
addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the optimism network', async function () { - var test = this; - const invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - if (paymasterAddress.includes(constant.not_found_2)) { - addContext(test, message.vali_pimlico_paymasterAddress_2); - console.log(message.vali_pimlico_paymasterAddress_2); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_2); - assert.fail(message.fail_pimlico_paymasterAddress_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_3); - console.log(message.vali_pimlico_paymasterAddress_3); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_3); - assert.fail(message.fail_pimlico_paymasterAddress_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?chainId=${Number(data.optimism_chainid)}`; 
// without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_4); - console.log(message.vali_pimlico_paymasterAddress_4); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_4); - assert.fail(message.fail_pimlico_paymasterAddress_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_optimism_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_3) { - addContext(test, message.vali_pimlico_paymasterAddress_5); - console.log(message.vali_pimlico_paymasterAddress_5); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_5); - assert.fail(message.fail_pimlico_paymasterAddress_5); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - // without chainid in queryString - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_6); - console.log(message.vali_pimlico_paymasterAddress_6); - } 
else { - addContext(test, message.fail_pimlico_paymasterAddress_6); - assert.fail(message.fail_pimlico_paymasterAddress_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.invalidEntryPointAddress, // invalid entry point address - { token: data.usdc_token }, - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - const errorMessage = returnedValue.error; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_pimlico_paymasterAddress_7); - console.log(message.vali_pimlico_paymasterAddress_7); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_7); - assert.fail(message.fail_pimlico_paymasterAddress_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: data.invalid_usdc_token }, // invalid token - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_1) { - addContext(test, message.vali_pimlico_paymasterAddress_8); - console.log(message.vali_pimlico_paymasterAddress_8); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_8); - assert.fail(message.fail_pimlico_paymasterAddress_8); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - 
method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [], // without parametets - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_9); - console.log(message.vali_pimlico_paymasterAddress_9); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_9); - assert.fail(message.fail_pimlico_paymasterAddress_9); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.incorrectTokenAddress_optimismUSDC, // incorrect token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_1); - console.log(message.vali_erc20Contract_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: 
[data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.invalidTokenAddress_optimismUSDC, // invalid token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_2); - console.log(message.vali_erc20Contract_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Contract_3); - console.log(message.vali_erc20Contract_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_5); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the 
erc20 contract on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Contract_4); - console.log(message.vali_erc20Contract_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get 
the UserOp Hash - try { - approveOp = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await optimismMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.invalidValue), - }); - - addContext(test, message.fail_addTransaction_2); - assert.fail(message.fail_addTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_value_1)) { - addContext(test, vali_addTransaction_1); - console.log(vali_addTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await optimismMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.optimism_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: 
data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await optimismMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - let invalid_queryString = `?chainId=${Number(data.optimism_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if 
(utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await optimismMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_optimism_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 
'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await optimismMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await 
customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await optimismMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the 
optimism network', async function () { - var test = this; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.optimism_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.optimism_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?chainId=${Number(data.optimism_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are 
optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_optimism_chainid - )}`; // invalid ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - 
assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the optimism network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await optimismMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the optimism network', async function () { - var test = this; - 
if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(data.invalid_usdc_token); - - addContext(test, message.fail_getTokenPaymasterAddress_2); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_1)) { - addContext(test, message.vali_getTokenPaymasterAddress_1); - console.log(message.vali_getTokenPaymasterAddress_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(); - - addContext(test, message.fail_getTokenPaymasterAddress_3); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_getTokenPaymasterAddress_2); - console.log(message.vali_getTokenPaymasterAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.invalidSender]); - - addContext(test, message.fail_removeWhitelist_3); - assert.fail(message.fail_removeWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_3); - console.log(message.vali_removeWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.incorrectSender]); - - addContext(test, message.fail_removeWhitelist_4); - assert.fail(message.fail_removeWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_4); - console.log(message.vali_removeWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_removeWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([randomAddress.address]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress.address, - data.sender, - ]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // make whitelisted addresses - await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - // remove whitelist addresses - let removewhitelist = await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (removewhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - addContext(test, message.fail_removeWhitelist_6); - assert.fail(message.fail_removeWhitelist_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.invalidSender]); - - addContext(test, message.fail_addWhitelist_3); - assert.fail(message.fail_addWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_3); - console.log(message.vali_addWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.incorrectSender]); - - addContext(test, message.fail_addWhitelist_4); - assert.fail(message.fail_addWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_4); - console.log(message.vali_addWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let addwhitelist = await 
arkaPaymaster.addWhitelist([ - randomAddress.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_5); - console.log(message.vali_addWhitelist_5); - } else { - addContext(test, message.fail_addWhitelist_7); - assert.fail(message.fail_addWhitelist_7); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.addWhitelist([ - randomAddress.address, - data.sender, - ]); - - if (addWhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // add whitelist addresses - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the 
whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_2); - assert.fail(message.fail_checkWhitelist_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_1); - console.log(message.vali_checkWhitelist_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_3); - assert.fail(message.fail_checkWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_2); - console.log(message.vali_checkWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let checkwhitelist = await arkaPaymaster.checkWhitelist( - randomAddress.address - ); - - if (checkwhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkwhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the check whitelist function of the arka paymaster without address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(); - - addContext(test, message.fail_checkWhitelist_5); - assert.fail(message.fail_checkWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_3); - console.log(message.vali_checkWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } 
- }); - - it('REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the deposit - try { - await arkaPaymaster.deposit('one'); - - addContext(test, message.fail_deposit_3); - assert.fail(message.fail_deposit_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_deposit_2); - console.log(message.vali_deposit_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/paymaster/xdai.spec.js b/test/specs/mainnet/paymaster/xdai.spec.js deleted file mode 100644 index f7303f5..0000000 --- a/test/specs/mainnet/paymaster/xdai.spec.js +++ /dev/null @@ -1,4309 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { - PrimeSdk, - DataUtils, - EtherspotBundler, - ArkaPaymaster, -} from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let xdaiMainNetSdk; -let xdaiEtherspotWalletAddress; -let xdaiNativeAddress = null; -let xdaiDataService; -let arkaPaymaster; -let runTest; - -/* eslint-disable prettier/prettier */ -describe('The PrimeSDK, when transaction with arka and pimlico paymasters with xdai network on the MainNet (with old wallet)', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - xdaiMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - xdaiEtherspotWalletAddress = - await xdaiMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - xdaiEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // initializating ArkaPaymaster... - try { - arkaPaymaster = new ArkaPaymaster( - Number(data.xdai_chainid), - process.env.API_KEY, - data.paymaster_arka - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_arka_initialize); - } - - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token on arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: 
`https://arka.etherspot.io?apiKey=${ - process.env.API_KEY_ARKA - }&chainId=${Number(data.xdai_chainid)}`, - context: { mode: 'sponsor' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer token with arka pimlico paymaster on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString 
= `?apiKey=${process.env.API_KEY_ARKA}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // wait for the execution - helper.wait(data.longTimeout); - - let balance; - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /** - * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend - * from the paymaster address on behalf of you. - */ - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - let uoHash1; - let transactionBatch; - let op; - let uoHash; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - console.log('returnedValue:::::::::::', returnedValue); - - paymasterAddress = returnedValue.message; - - try { - assert.isNotEmpty( - paymasterAddress, - message.vali_pimlico_paymasterAddress_1 - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - let contract; - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - - contract = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - - try { - assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(contract.data, message.vali_erc20Contract_data); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get estimation of transaction - try { - approveOp = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - approveOp.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callData, - message.vali_estimateTransaction_callData - 
); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - uoHash1 = await xdaiMainNetSdk.send(approveOp); - - try { - assert.isNotEmpty(uoHash1, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash... 
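[editor's sketch, not part of the patch] The "get transaction hash..." step above is followed by an inline polling loop over getUserOpReceipt. A minimal sketch of the same pattern as a reusable helper, assuming only that the sdk instance exposes getUserOpReceipt(hash) as used in the surrounding test; the helper name, timeout and poll interval are illustrative:

// Illustrative only: poll until the bundler reports a receipt or the deadline passes.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function waitForUserOpReceipt(sdk, uoHash, timeoutMs = 60000, pollMs = 2000) {
  const deadline = Date.now() + timeoutMs;
  let receipt = null;
  while (receipt == null && Date.now() < deadline) {
    receipt = await sdk.getUserOpReceipt(uoHash); // null until the UserOp is mined
    if (receipt == null) await sleep(pollMs);
  }
  return receipt; // null here means the timeout elapsed without a receipt
}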
- console.log('Waiting for transaction...'); - let userOpsReceipt1 = null; - const timeout1 = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt1 == null && Date.now() < timeout1) { - helper.wait(data.mediumTimeout); - userOpsReceipt1 = await xdaiMainNetSdk.getUserOpReceipt(uoHash1); - } - - // wait for the execution - helper.wait(data.longTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); 
- const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - } else { - addContext(test, message.fail_paymasterAddress_1); - assert.fail(message.fail_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the XDAI network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let balance; - let transactionBatch; - let op; - let uoHash; - - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
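[editor's sketch, not part of the patch] The comment above notes that validUntil/validAfter are optional, are given in milliseconds, default to roughly 10 minutes after the send call, and apply only to sponsor-mode transactions. A minimal sketch of building that window and passing it to estimate(), assuming the same xdaiMainNetSdk, arka_url and queryString as the surrounding test; the 10-minute figure mirrors the default described in the comment and everything here is illustrative:

// Illustrative only: sponsor-mode validity window, in milliseconds since the epoch.
const now = Date.now();
const sponsorshipWindow = {
  mode: 'sponsor',
  validAfter: now,                  // becomes valid immediately
  validUntil: now + 10 * 60 * 1000, // expires ~10 minutes later
};

const sponsoredOp = await xdaiMainNetSdk.estimate({
  paymasterDetails: {
    url: `${arka_url}${queryString}`,
    context: sponsorshipWindow,     // ignored by token (erc20) paymasters
  },
});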
assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the metadata of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the metadata - try { - let metadata = await arkaPaymaster.metadata(); - - try { - assert.isNotEmpty( - metadata.sponsorAddress, - message.vali_metadata_sponsorAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.sponsorWalletBalance, - message.vali_metadata_sponsorWalletBalance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.chainsSupported, - message.vali_metadata_chainsSupported - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.tokenPaymasters, - message.vali_metadata_tokenPaymasters - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_metadata_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the get token paymaster address function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the get token paymaster address - try { - let getTokenPaymasterAddress = - await arkaPaymaster.getTokenPaymasterAddress('USDC'); - - try { - assert.isNotEmpty( - getTokenPaymasterAddress, - message.vali_getTokenPaymasterAddress_tokenPaymasterAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the remove whitelist address function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { 
- helper.wait(data.mediumTimeout); - - // validate the remove whitelist address - try { - let removeWhitelist = await arkaPaymaster.removeWhitelist([ - data.sender, - ]); - - if (removeWhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the add whitelist address function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the add whitelist address - try { - let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); - - if (addWhitelist.includes(constant.add_whitelist_1)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the check whitelist function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the whilelist address - try { - let checkWhitelist = await arkaPaymaster.checkWhitelist(data.sender); - - if (checkWhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkWhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the deposit function of the arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the deposit - try { - let deposit = await 
arkaPaymaster.deposit(data.value); - - if (deposit.includes(constant.deposit_1)) { - addContext(test, message.vali_deposit_1); - console.log(message.vali_deposit_1); - } else { - addContext(test, message.fail_deposit_1); - assert.fail(message.fail_deposit_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid arka paymaster url on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.invalid_paymaster_arka, // invalid URL - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - if (e.message === constant.not_found) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate 
transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INVALID_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INCORRECT_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_5); - assert.fail(message.fail_estimateTransaction_5); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_4); - console.log(message.vali_estimateTransaction_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without API Key of arka paymaster on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - // without api_key - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the xdai network', async function () { - var test = this; - const invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - if (paymasterAddress.includes(constant.not_found_2)) { - addContext(test, message.vali_pimlico_paymasterAddress_2); - console.log(message.vali_pimlico_paymasterAddress_2); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_2); - assert.fail(message.fail_pimlico_paymasterAddress_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_3); - console.log(message.vali_pimlico_paymasterAddress_3); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_3); - assert.fail(message.fail_pimlico_paymasterAddress_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?chainId=${Number(data.xdai_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_4); - console.log(message.vali_pimlico_paymasterAddress_4); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_4); - assert.fail(message.fail_pimlico_paymasterAddress_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_xdai_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_3) { - addContext(test, message.vali_pimlico_paymasterAddress_5); - console.log(message.vali_pimlico_paymasterAddress_5); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_5); - assert.fail(message.fail_pimlico_paymasterAddress_5); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico 
paymaster without ChainID in queryString on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - // without chainid in queryString - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_6); - console.log(message.vali_pimlico_paymasterAddress_6); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_6); - assert.fail(message.fail_pimlico_paymasterAddress_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.invalidEntryPointAddress, // invalid entry point address - { token: data.usdc_token }, - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - const errorMessage = returnedValue.error; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_pimlico_paymasterAddress_7); - console.log(message.vali_pimlico_paymasterAddress_7); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_7); - assert.fail(message.fail_pimlico_paymasterAddress_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: data.invalid_usdc_token }, // invalid token - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch 
(e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_1) { - addContext(test, message.vali_pimlico_paymasterAddress_8); - console.log(message.vali_pimlico_paymasterAddress_8); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_8); - assert.fail(message.fail_pimlico_paymasterAddress_8); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [], // without parametets - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_9); - console.log(message.vali_pimlico_paymasterAddress_9); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_9); - assert.fail(message.fail_pimlico_paymasterAddress_9); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.incorrectTokenAddress_xdaiUSDC, // incorrect token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if 
(errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_1); - console.log(message.vali_erc20Contract_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.invalidTokenAddress_xdaiUSDC, // invalid token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_2); - console.log(message.vali_erc20Contract_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Contract_3); - console.log(message.vali_erc20Contract_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_5); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Contract_4); - console.log(message.vali_erc20Contract_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the xdai network', async function () { - var test 
= this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.invalidValue), - }); - - addContext(test, message.fail_addTransaction_2); - assert.fail(message.fail_addTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_value_1)) { - addContext(test, vali_addTransaction_1); - console.log(vali_addTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - 
data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token 
on arka pimlico paymaster with invalid Api Key while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.xdai_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_estimateTransaction_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?chainId=${Number(data.xdai_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - 
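The invalid-URL, invalid-API-key and missing-API-key variants in the surrounding tests all follow the same expected-failure shape around estimate(). A condensed sketch of that pattern, assuming chai's assert is in scope as in these specs; the helper name is hypothetical and the expected substring depends on which query parameter was corrupted.

async function expectErc20EstimateToFail(sdk, paymasterUrl, token, expectedErrorSnippet) {
  try {
    await sdk.estimate({
      paymasterDetails: {
        url: paymasterUrl, // e.g. `${invalid_arka_url}${queryString}` or `${arka_url}${invalid_queryString}`
        context: { token, mode: 'erc20' },
      },
    });
    // reaching this line means the bad paymaster parameter was accepted
    assert.fail('estimate unexpectedly succeeded despite the bad paymaster parameter');
  } catch (e) {
    // in these specs a matching error message is the passing outcome
    assert.include(e.message, expectedErrorSnippet);
  }
}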
assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_xdai_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the xdai network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await xdaiMainNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the XDAI network', async function () { - var test = this; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.xdai_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { - 
mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.xdai_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, 
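For reference, a sketch of the happy-path sponsor-mode call that the surrounding negative tests perturb. Per the spec's own comment, validAfter/validUntil are optional, are expressed in milliseconds, default to roughly a 10-minute expiry, and only apply to sponsored transactions; the 100-minute window mirrors the value used in these tests. The SDK instance and fixtures are the ones from this spec.

const now = Date.now();
const op = await xdaiMainNetSdk.estimate({
  paymasterDetails: {
    url: `${data.paymaster_arka}?apiKey=${process.env.API_KEY}&chainId=${Number(data.xdai_chainid)}`,
    context: {
      mode: 'sponsor',
      validAfter: now,                    // valid immediately
      validUntil: now + 100 * 60 * 1000,  // expires in 100 minutes
    },
  },
});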
message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?chainId=${Number(data.xdai_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_xdai_chainid - )}`; // invalid ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the XDAI network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await 
xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await xdaiMainNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(data.invalid_usdc_token); - - addContext(test, message.fail_getTokenPaymasterAddress_2); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_1)) { - addContext(test, message.vali_getTokenPaymasterAddress_1); - console.log(message.vali_getTokenPaymasterAddress_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(); - - addContext(test, message.fail_getTokenPaymasterAddress_3); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_getTokenPaymasterAddress_2); - console.log(message.vali_getTokenPaymasterAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } - } - }, 
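The incorrect-token and missing-token tests above invert a simple lookup. A sketch of the positive path, assuming the arkaPaymaster instance from this spec's setup and that the call resolves with the token paymaster address for the given symbol.

// token symbol comes from the spec fixtures (data.usdc_token)
const usdcPaymaster = await arkaPaymaster.getTokenPaymasterAddress(data.usdc_token);
console.log('token paymaster for USDC:', usdcPaymaster);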
data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.invalidSender]); - - addContext(test, message.fail_removeWhitelist_3); - assert.fail(message.fail_removeWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_3); - console.log(message.vali_removeWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.incorrectSender]); - - addContext(test, message.fail_removeWhitelist_4); - assert.fail(message.fail_removeWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_4); - console.log(message.vali_removeWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([randomAddress.address]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await 
customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress.address, - data.sender, - ]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // make whitelisted addresses - await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - // remove whitelist addresses - let removewhitelist = await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (removewhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - addContext(test, message.fail_removeWhitelist_6); - assert.fail(message.fail_removeWhitelist_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - 
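The whitelist tests above ultimately exercise an add/remove round trip. A compact sketch of that round trip, assuming the arkaPaymaster instance from this spec's setup; the expected substrings are the spec's own constants rather than literal message text.

const addr1 = ethers.Wallet.createRandom().address;
const addr2 = ethers.Wallet.createRandom().address;

// whitelist two fresh addresses, then remove them again
const added = await arkaPaymaster.addWhitelist([addr1, addr2]);
assert.include(added, constant.add_whitelist_3);      // success message for addWhitelist

const removed = await arkaPaymaster.removeWhitelist([addr1, addr2]);
assert.include(removed, constant.remove_whitelist_2); // success message for removeWhitelist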
console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.invalidSender]); - - addContext(test, message.fail_addWhitelist_3); - assert.fail(message.fail_addWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_3); - console.log(message.vali_addWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.incorrectSender]); - - addContext(test, message.fail_addWhitelist_4); - assert.fail(message.fail_addWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_4); - console.log(message.vali_addWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_5); - console.log(message.vali_addWhitelist_5); - } else { - addContext(test, message.fail_addWhitelist_7); - assert.fail(message.fail_addWhitelist_7); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function 
() { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.addWhitelist([ - randomAddress.address, - data.sender, - ]); - - if (addWhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // add whitelist addresses - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_2); - assert.fail(message.fail_checkWhitelist_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_1); - console.log(message.vali_checkWhitelist_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the xdai network', async function () { - var test = this; - if (runTest) { 
- await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_3); - assert.fail(message.fail_checkWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_2); - console.log(message.vali_checkWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let checkwhitelist = await arkaPaymaster.checkWhitelist( - randomAddress.address - ); - - if (checkwhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkwhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster without address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(); - - addContext(test, message.fail_checkWhitelist_5); - assert.fail(message.fail_checkWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_3); - console.log(message.vali_checkWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the deposit - try { - await arkaPaymaster.deposit('one'); - - addContext(test, message.fail_deposit_3); - assert.fail(message.fail_deposit_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_deposit_2); - console.log(message.vali_deposit_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_deposit_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/postcondition/postcondition_newWallet.spec.js b/test/specs/mainnet/postcondition/postcondition_newWallet.spec.js index de82985..b3c93b4 100644 --- a/test/specs/mainnet/postcondition/postcondition_newWallet.spec.js +++ b/test/specs/mainnet/postcondition/postcondition_newWallet.spec.js @@ -5,6 +5,12 @@ import { assert } from 'chai'; import addContext from 'mochawesome/addContext.js'; import { ethers } from 'ethers'; import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_mainnet.js'; import customRetryAsync from '../../../utils/baseTest.js'; import helper from '../../../utils/helper.js'; import data from '../../../data/testData.json' assert { type: 'json' }; @@ -14,255 +20,271 @@ import { fileURLToPath } from 'url'; import fs from 'fs'; import path from 'path'; -let xdaiMainNetSdk; +let mainnetPrimeSdk; +let mainnetPrimeSdk_old; const __dirname = dirname(fileURLToPath(import.meta.url)); describe('Perform the postcondition for new wallet fund', function () { - it('POSTCONDITION1: Initialize the modular sdk for new private key on the xdai network', async function () { - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + it( + 'POSTCONDITION1: Initialize the modular sdk for new private key on the ' + + randomChainName + + ' network', + async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - var test = this; - await customRetryAsync(async function () { - // wait for the execution - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { - privateKey: sharedState.newPrivateKey, - }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - }, data.retry); // Retry this async test up to 3 times - }); + var test = this; + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.mediumTimeout); - it('POSTCONDITION2: Perform the transfer ERC20 token from new wallet to old wallet on the xdai network', async function () { - var test = this; + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { + privateKey: sharedState.newPrivateKey, + }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + }, data.retry); // Retry this async test up to 3 times + } + ); - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); + it( + 'POSTCONDITION2: Perform the transfer ERC20 token from new wallet to old wallet on the ' + + randomChainName + + ' network', + async function () { 
+ var test = this; - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } - // get transferFrom encoded data - let transactionData; - balance = balance - 0.001; - const balanceStr = balance.toFixed(3); - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.sender, - ethers.utils.parseUnits(balanceStr, data.erc20_usdc_decimal), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } + // get transferFrom encoded data + let transactionData; + balance = balance - 0.001; + const balanceStr = balance.toFixed(3); + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.sender, + ethers.utils.parseUnits(balanceStr, data.erc20_usdc_decimal), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); + } - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_clearTransaction_1); + } - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); + assert.fail(message.fail_estimateTransaction_1); } - } - // get transaction hash... - try { - console.log('Waiting for transaction...'); - let userOpsReceipt = null; - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - helper.wait(data.mediumTimeout); - userOpsReceipt = await xdaiMainNetSdk_old.getUserOpReceipt(uoHash); + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - }); - it('POSTCONDITION3: Perform the transfer native token from new wallet to old wallet on the xdai network', async function () { - var test = this; - await customRetryAsync(async function () { - helper.wait(data.longTimeout); + // get transaction hash... 
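          // Poll the bundler for the UserOp receipt: call getUserOpReceipt(uoHash)
          // repeatedly, waiting data.mediumTimeout between attempts, until a
          // receipt is returned or the one-minute deadline below expires; any
          // error while polling fails the test.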
+ try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await mainnetPrimeSdk_old.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } + ); - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } + it( + 'POSTCONDITION3: Perform the transfer native token from new wallet to old wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + await customRetryAsync(async function () { + helper.wait(data.longTimeout); - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } - // add transactions to the batch - let transactionBatch; - try { - balance = balance - 0.0001; - const balanceStr = balance.toFixed(3); + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.sender, - value: ethers.utils.parseEther(balanceStr), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } + // add transactions to the batch + let transactionBatch; + try { + balance = balance - 0.0001; + const balanceStr = balance.toFixed(3); - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.sender, + value: ethers.utils.parseEther(balanceStr), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); + 
assert.fail(message.fail_estimateTransaction_1); } - } - // get transaction hash... - console.log('Waiting for transaction...'); - let userOpsReceipt = null; - const timeout = Date.now() + 1200000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - helper.wait(data.mediumTimeout); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - }, data.retry); // Retry this async test up to 3 times - }); + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 1200000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + }, data.retry); // Retry this async test up to 3 times + } + ); }); diff --git a/test/specs/mainnet/precondition/precondition_newWallet.spec.js b/test/specs/mainnet/precondition/precondition_newWallet.spec.js index 76280c7..0369eb0 100644 --- a/test/specs/mainnet/precondition/precondition_newWallet.spec.js +++ b/test/specs/mainnet/precondition/precondition_newWallet.spec.js @@ -6,9 +6,14 @@ import { assert } from 'chai'; import addContext from 'mochawesome/addContext.js'; import { ethers } from 'ethers'; import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_mainnet.js'; import customRetryAsync from '../../../utils/baseTest.js'; import helper from '../../../utils/helper.js'; -import testUtils from '../../../utils/testUtils.js'; import data from '../../../data/testData.json' assert { type: 'json' }; import message from '../../../data/messages.json' assert { type: 'json' }; import { dirname } from 'path'; @@ -16,375 +21,401 @@ import { fileURLToPath } from 'url'; import fs from 'fs'; import path from 'path'; -let xdaiMainNetSdk_old; -let xdaiMainNetSdk; +let mainnetPrimeSdk_old; +let mainnetPrimeSdk; let primeAccountAddress; const __dirname = dirname(fileURLToPath(import.meta.url)); describe('Perform the precondition for new wallet generation', function () { - it('PRECONDITION1: Create random private key on the xdai network', async function () { - // Generate a random private key - const randomPrivateKeyString = randomPrivateKey(); + it( + 'PRECONDITION1: Create random private key on the ' + + randomChainName + + ' network', + async function () { + // Generate a random private key + const randomPrivateKeyString = randomPrivateKey(); - console.log('randomPrivateKeyString', randomPrivateKeyString); + console.log('randomPrivateKeyString', randomPrivateKeyString); - // Store privatekey in utility + // Store privatekey in utility - const valueToPersist = { newPrivateKey: randomPrivateKeyString }; - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - fs.writeFileSync(filePath, JSON.stringify(valueToPersist)); - }); - - it('PRECONDITION2: Initialize the prime sdk for new private key on the xdai network', async function () { - var test = this; - 
await customRetryAsync(async function () { + const valueToPersist = { newPrivateKey: randomPrivateKeyString }; const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - - // wait for the execution - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: sharedState.newPrivateKey }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), + fs.writeFileSync(filePath, JSON.stringify(valueToPersist)); + } + ); + + it( + 'PRECONDITION2: Initialize the prime sdk for new private key on the ' + + randomChainName + + ' network', + async function () { + var test = this; + await customRetryAsync(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + // wait for the execution + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get modular account address + try { + primeAccountAddress = + await mainnetPrimeSdk.getCounterFactualAddress(); + + console.log('primeAccountAddress', primeAccountAddress); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + }, data.retry); // Retry this async test up to 3 times + } + ); + + it( + 'PRECONDITION3: Perform the transfer native token from old wallet to new wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk_old = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk_old.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + + try { + transactionBatch = await mainnetPrimeSdk_old.addUserOpsToBatch({ + to: primeAccountAddress, + value: ethers.utils.parseEther(data.newWallet_value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk_old.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp 
and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk_old.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get modular account address - try { - primeAccountAddress = await xdaiMainNetSdk.getCounterFactualAddress(); - - console.log('primeAccountAddress', primeAccountAddress); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - }, data.retry); // Retry this async test up to 3 times - }); - - it('PRECONDITION3: Perform the transfer native token from old wallet to new wallet on the xdai network', async function () { - var test = this; - await customRetryAsync(async function () { - // wait for the execution - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk_old = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), + } + + // get transaction hash... + try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await mainnetPrimeSdk_old.getUserOpReceipt(uoHash); } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // wait for the execution - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk_old.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - - try { - transactionBatch = await xdaiMainNetSdk_old.addUserOpsToBatch({ - to: primeAccountAddress, - value: ethers.utils.parseEther(data.newWallet_value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk_old.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk_old.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { + } catch (e) { + console.error(e); + const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_submitTransaction_1); } - } - - // get transaction hash... 
- try { - console.log('Waiting for transaction...'); - let userOpsReceipt = null; - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - helper.wait(data.mediumTimeout); - userOpsReceipt = await xdaiMainNetSdk_old.getUserOpReceipt(uoHash); + }, data.retry); // Retry this async test up to 3 times + } + ); + + it( + 'PRECONDITION4: Perform the transfer ERC20 token from old wallet to new wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 3 times - }); - - it('PRECONDITION4: Perform the transfer ERC20 token from old wallet to new wallet on the xdai network', async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - primeAccountAddress, - ethers.utils.parseUnits( - data.newWallet_erc20value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - 
addContext(test, message.skip_transaction_error); - test.skip(); - } else { + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); + assert.fail(message.fail_erc20Transfer_contractInterface); } - } - - // get transaction hash... - try { - console.log('Waiting for transaction...'); - let userOpsReceipt = null; - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - helper.wait(data.mediumTimeout); - userOpsReceipt = await xdaiMainNetSdk_old.getUserOpReceipt(uoHash); + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + primeAccountAddress, + ethers.utils.parseUnits( + data.newWallet_erc20value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('PRECONDITION5: Perform the transfer native token from new wallet to old wallet on the xdai network', async function () { - var test = this; - - await customRetryAsync(async function () { - // wait for the execution - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.sender, - value: ethers.utils.parseEther(balance), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: 
transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... + try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await mainnetPrimeSdk_old.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_submitTransaction_1); } - } - - // get transaction hash... - try { - console.log('Waiting for transaction...'); - let userOpsReceipt = null; - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - helper.wait(data.mediumTimeout); - userOpsReceipt = await xdaiMainNetSdk_old.getUserOpReceipt(uoHash); + }, data.retry); // Retry this async test up to 5 times + } + ); + + it( + 'PRECONDITION5: Perform the transfer native token from new wallet to old wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.sender, + value: ethers.utils.parseEther(balance), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + 
addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... + try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await mainnetPrimeSdk_old.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - }); + }, data.retry); // Retry this async test up to 5 times + } + ); }); diff --git a/test/specs/mainnet/swap/arbitrum.spec.js b/test/specs/mainnet/swap/arbitrum.spec.js deleted file mode 100644 index 298e0e6..0000000 --- a/test/specs/mainnet/swap/arbitrum.spec.js +++ /dev/null @@ -1,2206 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let arbitrumMainNetSdk; -let arbitrumEtherspotWalletAddress; -let arbitrumNativeAddress = null; -let arbitrumDataService; -let runTest; - -describe('The PrimeSDK, when get cross chain quotes and get advance routes LiFi transaction details with arbitrum network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - // initializating sdk - try { - arbitrumMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.arbitrum_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.arbitrum_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - arbitrumMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - arbitrumEtherspotWalletAddress = - await arbitrumMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - arbitrumEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - arbitrumDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await arbitrumDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === arbitrumNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_arbitrumUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromChainId = data.arbitrum_chainid; - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_arbitrumUSDC; - let toTokenAddress = 
data.tokenAddress_arbitrumUSDT; - let fromAmount = data.exchange_offer_value; - - offers = await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - 
console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_arbitrumUSDC; - let toTokenAddress = constants.AddressZero; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.arbitrum_chainid; - - offers = await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the getCrossChainQuotes response with valid details on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - if (quotes.items.length > 0) { - try { - assert.isNotEmpty( - quotes.items[0].provider, - message.vali_crossChainQuotes_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.data, - message.vali_crossChainQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.to, - message.vali_crossChainQuotes_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.value, - message.vali_crossChainQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.from, - message.vali_crossChainQuotes_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].transaction.chainId, - message.vali_crossChainQuotes_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_crossChainQuotes_1); - console.log(message.vali_crossChainQuotes_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - let stepTransaction; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - if (quotes.items.length > 0) { - const quote = quotes.items[0]; // Selected the first route - stepTransaction = await arbitrumDataService.getStepTransaction({ - route: quote, - account: data.sender, - }); - - try { - assert.isNotEmpty( - quotes.items[0].id, - message.vali_advanceRoutesLiFi_id - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].fromChainId, - message.vali_advanceRoutesLiFi_fromChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmountUSD, - message.vali_advanceRoutesLiFi_fromAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmount, - message.vali_advanceRoutesLiFi_fromAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromToken, - message.vali_advanceRoutesLiFi_fromToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAddress, - message.vali_advanceRoutesLiFi_fromAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].toChainId, - message.vali_advanceRoutesLiFi_toChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountUSD, - message.vali_advanceRoutesLiFi_toAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmount, - message.vali_advanceRoutesLiFi_toAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountMin, - message.vali_advanceRoutesLiFi_toAmountMin - ); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toToken, - message.vali_advanceRoutesLiFi_toToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAddress, - message.vali_advanceRoutesLiFi_toAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].gasCostUSD, - message.vali_advanceRoutesLiFi_gasCostUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[0].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[1].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasLimit, - message.vali_stepTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasPrice, - message.vali_stepTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_advanceRoutesLiFi_1); - console.log(message.vali_advanceRoutesLiFi_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_advanceRoutesLiFi_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { 
- console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.invalidTokenAddress_arbitrumUSDC; // Invalid fromTokenAddress - let toTokenAddress = data.tokenAddress_arbitrumUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.arbitrum_chainid; - - await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_2); - assert.fail(message.fail_exchangeOffers_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_4); - console.log(message.vali_exchangeOffers_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromTokenAddress details on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - 
exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let toTokenAddress = data.tokenAddress_arbitrumUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.arbitrum_chainid; - - await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - // without fromTokenAddress - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_3); - assert.fail(message.fail_exchangeOffers_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_5); - console.log(message.vali_exchangeOffers_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_arbitrumUSDC; - let toTokenAddress = data.invalidTokenAddress_arbitrumUSDT; // Invalid toTokenAddress - let fromAmount = data.exchange_offer_value; - let fromChainId = data.arbitrum_chainid; - - await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_4); - assert.fail(message.fail_exchangeOffers_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_6); - console.log(message.vali_exchangeOffers_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without toTokenAddress details on the 
arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_arbitrumUSDC; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.arbitrum_chainid; - - await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - // without toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_5); - assert.fail(message.fail_exchangeOffers_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_7); - console.log(message.vali_exchangeOffers_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromAmount on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_arbitrumUSDC; - let toTokenAddress = data.tokenAddress_arbitrumUSDT; - let fromAmount = data.invalidValue; // invalid fromAmount - let fromChainId = data.arbitrum_chainid; - - await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_6); - assert.fail(message.fail_exchangeOffers_6); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_8); - console.log(message.vali_exchangeOffers_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response 
with decimal fromAmount on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_arbitrumUSDC; - let toTokenAddress = data.tokenAddress_arbitrumUSDT; - let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount - let fromChainId = data.arbitrum_chainid; - - await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_7); - assert.fail(message.fail_exchangeOffers_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_9); - console.log(message.vali_exchangeOffers_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromAmount on the arbitrum network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await arbitrumDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_arbitrumUSDC; - let toTokenAddress = data.tokenAddress_arbitrumUSDT; - let fromChainId = data.arbitrum_chainid; - - await arbitrumDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - // without fromAmount - }); - - addContext(test, message.fail_exchangeOffers_9); - assert.fail(message.fail_exchangeOffers_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_exchangeOffers_11); - console.log(message.vali_exchangeOffers_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: 
Validate the getCrossChainQuotes response without fromChainId detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_2); - assert.fail(message.fail_crossChainQuotes_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_crossChainQuotes_2); - console.log(message.vali_crossChainQuotes_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_3); - assert.fail(message.fail_crossChainQuotes_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_crossChainQuotes_3); - console.log(message.vali_crossChainQuotes_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_4); - assert.fail(message.fail_crossChainQuotes_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_4); - console.log(message.vali_crossChainQuotes_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_4); - } - } - }, data.retry); // Retry this async test 
up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_5); - assert.fail(message.fail_crossChainQuotes_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_5); - console.log(message.vali_crossChainQuotes_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_6); - assert.fail(message.fail_crossChainQuotes_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_6); - console.log(message.vali_crossChainQuotes_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_7); - assert.fail(message.fail_crossChainQuotes_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_7); - 
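// The skipped getCrossChainQuotes regression cases above all repeat one pattern: build a
// quote payload, omit or corrupt a single field, call getCrossChainQuotes, and expect a
// class-validator style rejection. A minimal sketch of that pattern, assuming the same
// `data`, `utils`, `assert`, and `constant` bindings these specs already import;
// buildQuotePayload and expectQuoteRejection are illustrative helper names, not SDK APIs.
const buildQuotePayload = (overrides = {}) => ({
  fromChainId: data.arbitrum_chainid,
  toChainId: data.matic_chainid,
  fromTokenAddress: data.tokenAddress_arbitrumUSDC,
  toTokenAddress: data.tokenAddress_maticUSDC,
  fromAddress: data.sender,
  fromAmount: utils.parseUnits(data.swap_value, 6),
  ...overrides, // e.g. { fromTokenAddress: data.invalidTokenAddress_arbitrumUSDC }
});

async function expectQuoteRejection(dataService, payload, expectedText) {
  try {
    await dataService.getCrossChainQuotes(payload);
    assert.fail('getCrossChainQuotes accepted a payload it should have rejected');
  } catch (e) {
    // These specs parse e.message as a JSON array of { property, constraints } entries.
    const errorResponse = JSON.parse(e.message);
    assert.include(JSON.stringify(errorResponse[0]), expectedText);
  }
}
// Usage sketch: await expectQuoteRejection(
//   arbitrumDataService,
//   buildQuotePayload({ fromTokenAddress: data.invalidTokenAddress_arbitrumUSDC }),
//   constant.invalid_address_2
// );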
console.log(message.vali_crossChainQuotes_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_8); - assert.fail(message.fail_crossChainQuotes_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_8); - console.log(message.vali_crossChainQuotes_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_9); - assert.fail(message.fail_crossChainQuotes_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_9); - console.log(message.vali_crossChainQuotes_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.invalidSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_10); - 
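// Every case in these swap specs shares the same scaffolding: gate on the runTest balance
// check from before(), wrap the body in customRetryAsync (imported from
// test/utils/baseTest.js), and skip with a warning when funds are insufficient. A minimal
// sketch of that scaffolding, reusing the spec's own bindings; the inner call stands in for
// whichever data-service request a given case exercises.
if (runTest) {
  await customRetryAsync(async () => {
    await arbitrumDataService.getExchangeSupportedAssets({
      page: 1,
      limit: 100,
      account: data.sender,
      chainId: Number(data.arbitrum_chainid),
    });
  }, data.retry); // re-runs the callback up to data.retry times before failing
} else {
  console.warn(message.exchangeOffers_insufficientBalance);
  test.skip(); // `test` is the Mocha context captured as `var test = this` in each case
}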
assert.fail(message.fail_crossChainQuotes_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_10); - assert.fail(message.vali_crossChainQuotes_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.incorrectSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_11); - assert.fail(message.fail_crossChainQuotes_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_11); - assert.fail(message.vali_crossChainQuotes_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - }; - - await arbitrumDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_12); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_12); - console.log(message.vali_crossChainQuotes_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - 
toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_2); - assert.fail(fail_advanceRoutesLiFi_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_advanceRoutesLiFi_2); - console.log(message.vali_advanceRoutesLiFi_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_3); - assert.fail(fail_advanceRoutesLiFi_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_advanceRoutesLifi_3); - console.log(message.vali_advanceRoutesLifi_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_4); - assert.fail(fail_advanceRoutesLiFi_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_4); - console.log(message.vali_advanceRoutesLifi_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: 
data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_5); - assert.fail(fail_advanceRoutesLiFi_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_5); - console.log(message.vali_advanceRoutesLifi_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_6); - assert.fail(fail_advanceRoutesLiFi_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_6); - console.log(message.vali_advanceRoutesLifi_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_7); - assert.fail(fail_advanceRoutesLiFi_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_7); - console.log(message.vali_advanceRoutesLifi_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the arbitrum network', async 
function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_8); - assert.fail(fail_advanceRoutesLiFi_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_8); - console.log(message.vali_advanceRoutesLifi_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_9); - assert.fail(fail_advanceRoutesLiFi_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_9); - console.log(message.vali_advanceRoutesLifi_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.arbitrum_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_arbitrumUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - }; - - await arbitrumDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_10); - assert.fail(fail_advanceRoutesLiFi_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_10); - console.log(message.vali_advanceRoutesLifi_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); -}); diff --git 
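// The getAdvanceRoutesLiFi regression cases above only cover rejected payloads; the happy
// path used by the SMOKE tests in these swap specs fetches routes first and then resolves
// the chosen route into executable steps with getStepTransaction. A minimal sketch of that
// flow, assuming the same dataService instance and testData fields used elsewhere in the
// file (the local variable names are illustrative):
const advanceRoutesPayload = {
  fromAddress: data.sender,
  fromChainId: data.arbitrum_chainid,
  toChainId: data.matic_chainid,
  fromTokenAddress: data.tokenAddress_arbitrumUSDC,
  toTokenAddress: data.tokenAddress_maticUSDC,
  fromAmount: utils.parseUnits(data.swap_value, 6),
};
const quotes = await arbitrumDataService.getAdvanceRoutesLiFi(advanceRoutesPayload);
if (quotes.items.length > 0) {
  const route = quotes.items[0]; // take the first advertised route
  const stepTransaction = await arbitrumDataService.getStepTransaction({
    route,
    account: data.sender,
  });
  // Each step item carries the to / data / value / chainId needed to execute that leg.
  assert.isNotEmpty(stepTransaction.items[0].to, message.vali_stepTransaction_to);
  assert.isNotEmpty(stepTransaction.items[0].data, message.vali_stepTransaction_data);
}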
a/test/specs/mainnet/swap/matic.spec.js b/test/specs/mainnet/swap/matic.spec.js deleted file mode 100644 index 442a46b..0000000 --- a/test/specs/mainnet/swap/matic.spec.js +++ /dev/null @@ -1,2206 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let maticMainNetSdk; -let maticEtherspotWalletAddress; -let maticNativeAddress = null; -let maticDataService; -let runTest; - -describe('The PrimeSDK, when get cross chain quotes and get advance routes LiFi transaction details with matic network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - // initializating sdk - try { - maticMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.matic_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.matic_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - maticMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - maticEtherspotWalletAddress = - await maticMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - maticEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
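// The before() hook of each swap spec follows one recipe: create a PrimeSdk for the target
// chain, derive the counterfactual wallet address, create the DataUtils service, and flip
// runTest only when the wallet holds enough native token and USDC. A condensed sketch of
// that recipe for the matic spec, reusing the imports above (the local names sdk,
// dataService, and balances are illustrative):
const sdk = new PrimeSdk(
  { privateKey: process.env.PRIVATE_KEY },
  {
    chainId: Number(data.matic_chainid),
    bundlerProvider: new EtherspotBundler(
      Number(data.matic_chainid),
      process.env.BUNDLER_API_KEY
    ),
  }
);
const walletAddress = await sdk.getCounterFactualAddress(); // expected to equal data.sender
const dataService = new DataUtils(process.env.DATA_API_KEY);
const balances = await dataService.getAccountBalances({
  account: data.sender,
  chainId: Number(data.matic_chainid),
});
// runTest becomes true only when both the native balance and the USDC balance (formatted
// with utils.formatUnits) clear the minimums defined in testData.json, as checked below.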
- try { - maticDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await maticDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.matic_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === maticNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_maticUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromChainId = data.matic_chainid; - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_maticUSDC; - let toTokenAddress = data.tokenAddress_maticUSDT; - let 
fromAmount = data.exchange_offer_value; - - offers = await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); 
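// The SMOKE case above asserts each offer field in its own try/catch. Read together, those
// checks describe the getExchangeOffers response shape; a compact restatement using the
// spec's own bindings (checkOffer is an illustrative helper name, not an SDK API):
const offers = await maticDataService.getExchangeOffers({
  fromAddress: data.sender,
  fromChainId: data.matic_chainid,
  fromTokenAddress: data.tokenAddress_maticUSDC,
  toTokenAddress: data.tokenAddress_maticUSDT,
  fromAmount: BigNumber.from(data.exchange_offer_value),
});
const checkOffer = (offer) => {
  assert.isNotEmpty(offer.provider, message.vali_exchangeOffers_provider);
  assert.isNotEmpty(offer.receiveAmount, message.vali_exchangeOffers_receiveAmount);
  assert.isNumber(offer.exchangeRate, message.vali_exchangeOffers_exchangeRate);
  assert.isNotEmpty(offer.transactions, message.vali_exchangeOffers_transactions);
};
offers.forEach(checkOffer); // an empty offers array is also treated as a valid response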
- const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_maticUSDC; - let toTokenAddress = constants.AddressZero; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.matic_chainid; - - offers = await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the getCrossChainQuotes response with valid details on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - if (quotes.items.length > 0) { - try { - assert.isNotEmpty( - quotes.items[0].provider, - message.vali_crossChainQuotes_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.data, - message.vali_crossChainQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.to, - message.vali_crossChainQuotes_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.value, - message.vali_crossChainQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.from, - message.vali_crossChainQuotes_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].transaction.chainId, - 
message.vali_crossChainQuotes_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_crossChainQuotes_1); - console.log(message.vali_crossChainQuotes_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - let stepTransaction; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - if (quotes.items.length > 0) { - const quote = quotes.items[0]; // Selected the first route - stepTransaction = await maticDataService.getStepTransaction({ - route: quote, - account: data.sender, - }); - - try { - assert.isNotEmpty( - quotes.items[0].id, - message.vali_advanceRoutesLiFi_id - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].fromChainId, - message.vali_advanceRoutesLiFi_fromChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmountUSD, - message.vali_advanceRoutesLiFi_fromAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmount, - message.vali_advanceRoutesLiFi_fromAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromToken, - message.vali_advanceRoutesLiFi_fromToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAddress, - message.vali_advanceRoutesLiFi_fromAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].toChainId, - message.vali_advanceRoutesLiFi_toChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountUSD, - message.vali_advanceRoutesLiFi_toAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmount, - message.vali_advanceRoutesLiFi_toAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountMin, - message.vali_advanceRoutesLiFi_toAmountMin - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toToken, - 
message.vali_advanceRoutesLiFi_toToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAddress, - message.vali_advanceRoutesLiFi_toAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].gasCostUSD, - message.vali_advanceRoutesLiFi_gasCostUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[0].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[1].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasLimit, - message.vali_stepTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasPrice, - message.vali_stepTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_advanceRoutesLiFi_1); - console.log(message.vali_advanceRoutesLiFi_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_advanceRoutesLiFi_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate 
the Exchange offers response with invalid fromTokenAddress details on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.invalidTokenAddress_maticUSDC; // Invalid fromTokenAddress - let toTokenAddress = data.tokenAddress_maticUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.matic_chainid; - - await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_2); - assert.fail(message.fail_exchangeOffers_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_4); - console.log(message.vali_exchangeOffers_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromTokenAddress details on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: 
Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let toTokenAddress = data.tokenAddress_maticUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.matic_chainid; - - await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - // without fromTokenAddress - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_3); - assert.fail(message.fail_exchangeOffers_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_5); - console.log(message.vali_exchangeOffers_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_maticUSDC; - let toTokenAddress = data.invalidTokenAddress_maticUSDT; // Invalid toTokenAddress - let fromAmount = data.exchange_offer_value; - let fromChainId = data.matic_chainid; - - await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_4); - assert.fail(message.fail_exchangeOffers_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_6); - console.log(message.vali_exchangeOffers_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without toTokenAddress details on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - 
await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_maticUSDC; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.matic_chainid; - - await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - // without toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_5); - assert.fail(message.fail_exchangeOffers_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_7); - console.log(message.vali_exchangeOffers_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromAmount on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_maticUSDC; - let toTokenAddress = data.tokenAddress_maticUSDT; - let fromAmount = data.invalidValue; // invalid fromAmount - let fromChainId = data.matic_chainid; - - await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_6); - assert.fail(message.fail_exchangeOffers_6); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_8); - console.log(message.vali_exchangeOffers_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with decimal fromAmount on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await 
maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_maticUSDC; - let toTokenAddress = data.tokenAddress_maticUSDT; - let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount - let fromChainId = data.matic_chainid; - - await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_7); - assert.fail(message.fail_exchangeOffers_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_9); - console.log(message.vali_exchangeOffers_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromAmount on the matic network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await maticDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.matic_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_maticUSDC; - let toTokenAddress = data.tokenAddress_maticUSDT; - let fromChainId = data.matic_chainid; - - await maticDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - // without fromAmount - }); - - addContext(test, message.fail_exchangeOffers_9); - assert.fail(message.fail_exchangeOffers_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_exchangeOffers_11); - console.log(message.vali_exchangeOffers_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - 
toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_2); - assert.fail(message.fail_crossChainQuotes_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_crossChainQuotes_2); - console.log(message.vali_crossChainQuotes_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_3); - assert.fail(message.fail_crossChainQuotes_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_crossChainQuotes_3); - console.log(message.vali_crossChainQuotes_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_4); - assert.fail(message.fail_crossChainQuotes_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_4); - console.log(message.vali_crossChainQuotes_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the matic network', async function () { - var test = this; - if 
(runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_5); - assert.fail(message.fail_crossChainQuotes_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_5); - console.log(message.vali_crossChainQuotes_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_6); - assert.fail(message.fail_crossChainQuotes_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_6); - console.log(message.vali_crossChainQuotes_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_7); - assert.fail(message.fail_crossChainQuotes_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_7); - console.log(message.vali_crossChainQuotes_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - 
test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_8); - assert.fail(message.fail_crossChainQuotes_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_8); - console.log(message.vali_crossChainQuotes_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_9); - assert.fail(message.fail_crossChainQuotes_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_9); - console.log(message.vali_crossChainQuotes_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.invalidSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_10); - assert.fail(message.fail_crossChainQuotes_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_10); - assert.fail(message.vali_crossChainQuotes_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_crossChainQuotes_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.incorrectSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_11); - assert.fail(message.fail_crossChainQuotes_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_11); - assert.fail(message.vali_crossChainQuotes_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - }; - - await maticDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_12); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_12); - console.log(message.vali_crossChainQuotes_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_2); - assert.fail(fail_advanceRoutesLiFi_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, 
message.vali_advanceRoutesLiFi_2); - console.log(message.vali_advanceRoutesLiFi_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_3); - assert.fail(fail_advanceRoutesLiFi_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_advanceRoutesLifi_3); - console.log(message.vali_advanceRoutesLifi_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_4); - assert.fail(fail_advanceRoutesLiFi_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_4); - console.log(message.vali_advanceRoutesLifi_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_5); - assert.fail(fail_advanceRoutesLiFi_5); - } catch (e) { - const errorResponse = 
JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_5); - console.log(message.vali_advanceRoutesLifi_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_6); - assert.fail(fail_advanceRoutesLiFi_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_6); - console.log(message.vali_advanceRoutesLifi_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_7); - assert.fail(fail_advanceRoutesLiFi_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_7); - console.log(message.vali_advanceRoutesLifi_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await 
maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_8); - assert.fail(fail_advanceRoutesLiFi_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_8); - console.log(message.vali_advanceRoutesLifi_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_9); - assert.fail(fail_advanceRoutesLiFi_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_9); - console.log(message.vali_advanceRoutesLifi_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.matic_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_maticUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - }; - - await maticDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_10); - assert.fail(fail_advanceRoutesLiFi_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_10); - console.log(message.vali_advanceRoutesLifi_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/swap/newWallet_swap.spec.js b/test/specs/mainnet/swap/newWallet_swap.spec.js new file mode 100644 index 0000000..6e9a78e --- /dev/null +++ b/test/specs/mainnet/swap/newWallet_swap.spec.js @@ -0,0 +1,2380 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, constants, BigNumber } from 'ethers'; +import { assert } from 
'chai';
+import addContext from 'mochawesome/addContext.js';
+import customRetryAsync from '../../../utils/baseTest.js';
+import {
+  randomChainId,
+  randomChainName,
+  randomIncorrectToTokenAddress,
+  randomIncorrectTokenAddress,
+  randomInvalidToTokenAddress,
+  randomInvalidTokenAddress,
+  randomInvalidTokenAddressUsdt,
+  randomToChainId,
+  randomToTokenAddress,
+  randomTokenAddress,
+  randomTokenAddressUsdt,
+} from '../../../utils/sharedData_mainnet.js';
+import helper from '../../../utils/helper.js';
+import data from '../../../data/testData.json' assert { type: 'json' };
+import constant from '../../../data/constant.json' assert { type: 'json' };
+import message from '../../../data/messages.json' assert { type: 'json' };
+import { dirname } from 'path';
+import { fileURLToPath } from 'url';
+import fs from 'fs';
+import path from 'path';
+
+let mainnetPrimeSdk;
+let nativeAddress = null;
+let dataService;
+let runTest;
+const __dirname = dirname(fileURLToPath(import.meta.url));
+
+describe('Perform the get cross chain quotes and get advance routes LiFi transaction details on the MainNet (with new wallet)', function () {
+  before(async function () {
+    const filePath = path.join(__dirname, '../../../utils/testUtils.json');
+    const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+
+    var test = this;
+
+    await customRetryAsync(async function () {
+      helper.wait(data.mediumTimeout);
+
+      // initializing the sdk
+      try {
+        mainnetPrimeSdk = new PrimeSdk(
+          { privateKey: sharedState.newPrivateKey },
+          {
+            chainId: Number(randomChainId),
+            bundlerProvider: new EtherspotBundler(
+              Number(randomChainId),
+              process.env.BUNDLER_API_KEY
+            ),
+          }
+        );
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_sdk_initialize);
+      }
+
+      // initializing the Data service...
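+      // DataUtils is constructed with the data API key taken from the environment
+      // (process.env.DATA_API_KEY); any initialization error is logged, attached to
+      // the report via addContext, and fails the suite with fail_data_service.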
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromChainId = randomChainId; + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let 
fromAmount = data.exchange_offer_value; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + 
console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = constants.AddressZero; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the getCrossChainQuotes response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = await dataService.getCrossChainQuotes(quoteRequestPayload); + + if (quotes.items.length > 0) { + try { + assert.isNotEmpty( + quotes.items[0].provider, + message.vali_crossChainQuotes_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.data, + message.vali_crossChainQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.to, + message.vali_crossChainQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.value, + message.vali_crossChainQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.from, + message.vali_crossChainQuotes_from + ); + } catch (e) { + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].transaction.chainId, + message.vali_crossChainQuotes_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_crossChainQuotes_1); + console.log(message.vali_crossChainQuotes_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + let stepTransaction; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + if (quotes.items.length > 0) { + const quote = quotes.items[0]; // Selected the first route + stepTransaction = await dataService.getStepTransaction({ + route: quote, + account: data.sender, + }); + + try { + assert.isNotEmpty( + quotes.items[0].id, + message.vali_advanceRoutesLiFi_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].fromChainId, + message.vali_advanceRoutesLiFi_fromChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmountUSD, + message.vali_advanceRoutesLiFi_fromAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmount, + message.vali_advanceRoutesLiFi_fromAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromToken, + message.vali_advanceRoutesLiFi_fromToken + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAddress, + message.vali_advanceRoutesLiFi_fromAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].toChainId, + message.vali_advanceRoutesLiFi_toChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountUSD, + message.vali_advanceRoutesLiFi_toAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmount, + message.vali_advanceRoutesLiFi_toAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountMin, + 
message.vali_advanceRoutesLiFi_toAmountMin + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toToken, + message.vali_advanceRoutesLiFi_toToken + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAddress, + message.vali_advanceRoutesLiFi_toAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].gasCostUSD, + message.vali_advanceRoutesLiFi_gasCostUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].value, + message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[0].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].value, + message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[1].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasLimit, + message.vali_stepTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasPrice, + message.vali_stepTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_advanceRoutesLiFi_1); + console.log(message.vali_advanceRoutesLiFi_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_advanceRoutesLiFi_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomInvalidTokenAddress; // Invalid fromTokenAddress + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_2); + assert.fail(message.fail_exchangeOffers_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_4); + console.log(message.vali_exchangeOffers_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the 
Exchange offers response without fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + // without fromTokenAddress + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_3); + assert.fail(message.fail_exchangeOffers_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_5); + console.log(message.vali_exchangeOffers_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomInvalidTokenAddressUsdt; // Invalid toTokenAddress + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_4); + assert.fail(message.fail_exchangeOffers_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_6); + console.log(message.vali_exchangeOffers_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_4); + } + } + }, 
data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + // without toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_5); + assert.fail(message.fail_exchangeOffers_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_7); + console.log(message.vali_exchangeOffers_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.invalidValue; // invalid fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_6); + assert.fail(message.fail_exchangeOffers_6); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_8); + console.log(message.vali_exchangeOffers_8); + } else { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with decimal fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_7); + assert.fail(message.fail_exchangeOffers_7); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_9); + console.log(message.vali_exchangeOffers_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + // without fromAmount + }); + + addContext(test, message.fail_exchangeOffers_9); + assert.fail(message.fail_exchangeOffers_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + 
addContext(test, message.vali_exchangeOffers_11); + console.log(message.vali_exchangeOffers_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_2); + assert.fail(message.fail_crossChainQuotes_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_crossChainQuotes_2); + console.log(message.vali_crossChainQuotes_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_3); + assert.fail(message.fail_crossChainQuotes_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_2) { + addContext(test, message.vali_crossChainQuotes_3); + console.log(message.vali_crossChainQuotes_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await 
dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_4); + assert.fail(message.fail_crossChainQuotes_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_4); + console.log(message.vali_crossChainQuotes_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_5); + assert.fail(message.fail_crossChainQuotes_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_5); + console.log(message.vali_crossChainQuotes_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_6); + assert.fail(message.fail_crossChainQuotes_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_6); + console.log(message.vali_crossChainQuotes_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async 
function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_7); + assert.fail(message.fail_crossChainQuotes_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_7); + console.log(message.vali_crossChainQuotes_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_8); + assert.fail(message.fail_crossChainQuotes_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_8); + console.log(message.vali_crossChainQuotes_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_9); + assert.fail(message.fail_crossChainQuotes_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_9); + console.log(message.vali_crossChainQuotes_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_crossChainQuotes_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.invalidSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_10); + assert.fail(message.fail_crossChainQuotes_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_10); + console.log(message.vali_crossChainQuotes_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.incorrectSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_11); + assert.fail(message.fail_crossChainQuotes_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_11); + console.log(message.vali_crossChainQuotes_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + addContext(test, message.fail_crossChainQuotes_11); + assert.fail(message.fail_crossChainQuotes_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + +
addContext(test, message.fail_crossChainQuotes_12); + assert.fail(message.fail_crossChainQuotes_12); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_12); + console.log(message.vali_crossChainQuotes_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_2); + assert.fail(message.fail_advanceRoutesLiFi_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_advanceRoutesLiFi_2); + console.log(message.vali_advanceRoutesLiFi_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_3); + assert.fail(message.fail_advanceRoutesLiFi_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_2) { + addContext(test, message.vali_advanceRoutesLifi_3); + console.log(message.vali_advanceRoutesLifi_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress:
data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_4); + assert.fail(message.fail_advanceRoutesLiFi_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_4); + console.log(message.vali_advanceRoutesLifi_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_5); + assert.fail(message.fail_advanceRoutesLiFi_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_5); + console.log(message.vali_advanceRoutesLifi_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_6); + assert.fail(message.fail_advanceRoutesLiFi_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_6); + console.log(message.vali_advanceRoutesLifi_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); +
test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_7); + assert.fail(message.fail_advanceRoutesLiFi_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_7); + console.log(message.vali_advanceRoutesLifi_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_8); + assert.fail(message.fail_advanceRoutesLiFi_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_8); + console.log(message.vali_advanceRoutesLifi_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_9); + assert.fail(message.fail_advanceRoutesLiFi_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_9); +
console.log(message.vali_advanceRoutesLifi_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_10); + assert.fail(message.fail_advanceRoutesLiFi_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_10); + console.log(message.vali_advanceRoutesLifi_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/swap/newWallet_xdai.spec.js b/test/specs/mainnet/swap/newWallet_xdai.spec.js deleted file mode 100644 index 99718e2..0000000 --- a/test/specs/mainnet/swap/newWallet_xdai.spec.js +++ /dev/null @@ -1,2213 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import testUtils from '../../../utils/testUtils.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; -import { dirname } from 'path'; -import { fileURLToPath } from 'url'; -import fs from 'fs'; -import path from 'path'; - -let xdaiMainNetSdk; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; -const __dirname = dirname(fileURLToPath(import.meta.url)); - -describe('The PrimeSDK, when get cross chain quotes and get advance routes LiFi transaction details with xdai network on the MainNet (with new wallet)', function () { - before(async function () { - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: sharedState.newPrivateKey }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), -
process.env.BUNDLER_API_KEY - ), - } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // initializating Data service... - try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - }, data.retry); // Retry this async test up to 5 times - }); - - beforeEach(async function () { - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromChainId = data.xdai_chainid; - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_value; - - offers = await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = constants.AddressZero; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - offers = await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the getCrossChainQuotes response with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - if (quotes.items.length > 0) { - try { - assert.isNotEmpty( - quotes.items[0].provider, - message.vali_crossChainQuotes_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.data, - message.vali_crossChainQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.to, - message.vali_crossChainQuotes_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.value, - message.vali_crossChainQuotes_value - ); - } 
catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.from, - message.vali_crossChainQuotes_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].transaction.chainId, - message.vali_crossChainQuotes_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_crossChainQuotes_1); - console.log(message.vali_crossChainQuotes_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - let stepTransaction; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - if (quotes.items.length > 0) { - const quote = quotes.items[0]; // Selected the first route - stepTransaction = await xdaiDataService.getStepTransaction({ - route: quote, - account: data.sender, - }); - - try { - assert.isNotEmpty( - quotes.items[0].id, - message.vali_advanceRoutesLiFi_id - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].fromChainId, - message.vali_advanceRoutesLiFi_fromChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmountUSD, - message.vali_advanceRoutesLiFi_fromAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmount, - message.vali_advanceRoutesLiFi_fromAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromToken, - message.vali_advanceRoutesLiFi_fromToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAddress, - message.vali_advanceRoutesLiFi_fromAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].toChainId, - message.vali_advanceRoutesLiFi_toChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountUSD, - message.vali_advanceRoutesLiFi_toAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
quotes.items[0].toAmount, - message.vali_advanceRoutesLiFi_toAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountMin, - message.vali_advanceRoutesLiFi_toAmountMin - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toToken, - message.vali_advanceRoutesLiFi_toToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAddress, - message.vali_advanceRoutesLiFi_toAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].gasCostUSD, - message.vali_advanceRoutesLiFi_gasCostUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[0].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[1].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasLimit, - message.vali_stepTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasPrice, - message.vali_stepTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } 
else { - addContext(test, message.vali_advanceRoutesLiFi_1); - console.log(message.vali_advanceRoutesLiFi_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_advanceRoutesLiFi_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.invalidTokenAddress_xdaiUSDC; // Invalid fromTokenAddress - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_2); - assert.fail(message.fail_exchangeOffers_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_4); - console.log(message.vali_exchangeOffers_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_2); - } - } - }, data.retry); // Retry this async test up 
to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - // without fromTokenAddress - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_3); - assert.fail(message.fail_exchangeOffers_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_5); - console.log(message.vali_exchangeOffers_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.invalidTokenAddress_xdaiUSDT; // Invalid toTokenAddress - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_4); - assert.fail(message.fail_exchangeOffers_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_6); - 
console.log(message.vali_exchangeOffers_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without toTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - // without toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_5); - assert.fail(message.fail_exchangeOffers_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_7); - console.log(message.vali_exchangeOffers_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromAmount on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.invalidValue; // invalid fromAmount - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_6); - assert.fail(message.fail_exchangeOffers_6); - } 
catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_8); - console.log(message.vali_exchangeOffers_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with decimal fromAmount on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_7); - assert.fail(message.fail_exchangeOffers_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_9); - console.log(message.vali_exchangeOffers_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromAmount on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - // without fromAmount - }); - - addContext(test, message.fail_exchangeOffers_9); - 
assert.fail(message.fail_exchangeOffers_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_exchangeOffers_11); - console.log(message.vali_exchangeOffers_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_2); - assert.fail(message.fail_crossChainQuotes_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_crossChainQuotes_2); - console.log(message.vali_crossChainQuotes_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_3); - assert.fail(message.fail_crossChainQuotes_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_crossChainQuotes_3); - console.log(message.vali_crossChainQuotes_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: 
data.invalidTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_4); - assert.fail(message.fail_crossChainQuotes_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_4); - console.log(message.vali_crossChainQuotes_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_5); - assert.fail(message.fail_crossChainQuotes_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_5); - console.log(message.vali_crossChainQuotes_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_6); - assert.fail(message.fail_crossChainQuotes_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_6); - console.log(message.vali_crossChainQuotes_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } 
- }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_7); - assert.fail(message.fail_crossChainQuotes_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_7); - console.log(message.vali_crossChainQuotes_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_8); - assert.fail(message.fail_crossChainQuotes_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_8); - console.log(message.vali_crossChainQuotes_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_9); - assert.fail(message.fail_crossChainQuotes_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_9); - 
console.log(message.vali_crossChainQuotes_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.invalidSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_10); - assert.fail(message.fail_crossChainQuotes_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_10); - assert.fail(message.vali_crossChainQuotes_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.incorrectSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_11); - assert.fail(message.fail_crossChainQuotes_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_11); - assert.fail(message.vali_crossChainQuotes_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: 
data.tokenAddress_maticUSDC, - fromAddress: data.sender, - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_12); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_12); - console.log(message.vali_crossChainQuotes_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_2); - assert.fail(fail_advanceRoutesLiFi_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_advanceRoutesLiFi_2); - console.log(message.vali_advanceRoutesLiFi_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_3); - assert.fail(fail_advanceRoutesLiFi_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_advanceRoutesLifi_3); - console.log(message.vali_advanceRoutesLifi_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async 
function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_4); - assert.fail(fail_advanceRoutesLiFi_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_4); - console.log(message.vali_advanceRoutesLifi_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_5); - assert.fail(fail_advanceRoutesLiFi_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_5); - console.log(message.vali_advanceRoutesLifi_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_6); - assert.fail(fail_advanceRoutesLiFi_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_6); - console.log(message.vali_advanceRoutesLifi_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, 
message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_7); - assert.fail(fail_advanceRoutesLiFi_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_7); - console.log(message.vali_advanceRoutesLifi_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_8); - assert.fail(fail_advanceRoutesLiFi_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_8); - console.log(message.vali_advanceRoutesLifi_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_9); - assert.fail(fail_advanceRoutesLiFi_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - 
addContext(test, message.vali_advanceRoutesLifi_9); - console.log(message.vali_advanceRoutesLifi_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_10); - assert.fail(fail_advanceRoutesLiFi_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_10); - console.log(message.vali_advanceRoutesLifi_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/swap/oldWallet_swap.spec.js b/test/specs/mainnet/swap/oldWallet_swap.spec.js new file mode 100644 index 0000000..56a2923 --- /dev/null +++ b/test/specs/mainnet/swap/oldWallet_swap.spec.js @@ -0,0 +1,2412 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, constants, BigNumber } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectToTokenAddress, + randomIncorrectTokenAddress, + randomInvalidToTokenAddress, + randomInvalidTokenAddress, + randomInvalidTokenAddressUsdt, + randomToChainId, + randomToTokenAddress, + randomTokenAddress, + randomTokenAddressUsdt, +} from '../../../utils/sharedData_mainnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let mainnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Perform the get cross chain quotes and get advance routes LiFi transaction details on the MainNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + 
process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + mainnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await mainnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { 
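+ // verify that the symbol field of the first exchange supported asset is present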
+ assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromChainId = randomChainId; + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + 
message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = constants.AddressZero; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the getCrossChainQuotes response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = await 
dataService.getCrossChainQuotes(quoteRequestPayload); + + if (quotes.items.length > 0) { + try { + assert.isNotEmpty( + quotes.items[0].provider, + message.vali_crossChainQuotes_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.data, + message.vali_crossChainQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.to, + message.vali_crossChainQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.value, + message.vali_crossChainQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.from, + message.vali_crossChainQuotes_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].transaction.chainId, + message.vali_crossChainQuotes_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_crossChainQuotes_1); + console.log(message.vali_crossChainQuotes_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + let stepTransaction; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + if (quotes.items.length > 0) { + const quote = quotes.items[0]; // Selected the first route + stepTransaction = await dataService.getStepTransaction({ + route: quote, + account: data.sender, + }); + + try { + assert.isNotEmpty( + quotes.items[0].id, + message.vali_advanceRoutesLiFi_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].fromChainId, + message.vali_advanceRoutesLiFi_fromChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmountUSD, + message.vali_advanceRoutesLiFi_fromAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmount, + message.vali_advanceRoutesLiFi_fromAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + 
quotes.items[0].fromToken, + message.vali_advanceRoutesLiFi_fromToken + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAddress, + message.vali_advanceRoutesLiFi_fromAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].toChainId, + message.vali_advanceRoutesLiFi_toChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountUSD, + message.vali_advanceRoutesLiFi_toAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmount, + message.vali_advanceRoutesLiFi_toAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountMin, + message.vali_advanceRoutesLiFi_toAmountMin + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toToken, + message.vali_advanceRoutesLiFi_toToken + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAddress, + message.vali_advanceRoutesLiFi_toAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].gasCostUSD, + message.vali_advanceRoutesLiFi_gasCostUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].value, + message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[0].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { 
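+ // verify that the value field of the second step transaction is present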
+ assert.isNotEmpty( + stepTransaction.items[1].value, + message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[1].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasLimit, + message.vali_stepTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasPrice, + message.vali_stepTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_advanceRoutesLiFi_1); + console.log(message.vali_advanceRoutesLiFi_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomInvalidTokenAddress; // 
Invalid fromTokenAddress + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_2); + assert.fail(message.fail_exchangeOffers_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_4); + console.log(message.vali_exchangeOffers_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + // without fromTokenAddress + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_3); + assert.fail(message.fail_exchangeOffers_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_5); + console.log(message.vali_exchangeOffers_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + 
console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomInvalidTokenAddressUsdt; // Invalid toTokenAddress + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_4); + assert.fail(message.fail_exchangeOffers_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_6); + console.log(message.vali_exchangeOffers_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + // without toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_5); + assert.fail(message.fail_exchangeOffers_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_7); + console.log(message.vali_exchangeOffers_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + 
}); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.invalidValue; // invalid fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_6); + assert.fail(message.fail_exchangeOffers_6); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_8); + console.log(message.vali_exchangeOffers_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with decimal fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_7); + assert.fail(message.fail_exchangeOffers_7); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_9); + console.log(message.vali_exchangeOffers_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await 
dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + // without fromAmount + }); + + addContext(test, message.fail_exchangeOffers_9); + assert.fail(message.fail_exchangeOffers_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + addContext(test, message.vali_exchangeOffers_11); + console.log(message.vali_exchangeOffers_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_2); + assert.fail(message.fail_crossChainQuotes_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_crossChainQuotes_2); + console.log(message.vali_crossChainQuotes_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_3); + assert.fail(message.fail_crossChainQuotes_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if 
(errorResponse[0].property === constant.invalid_chainid_2) { + addContext(test, message.vali_crossChainQuotes_3); + console.log(message.vali_crossChainQuotes_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_4); + assert.fail(message.fail_crossChainQuotes_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_4); + console.log(message.vali_crossChainQuotes_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_5); + assert.fail(message.fail_crossChainQuotes_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_5); + console.log(message.vali_crossChainQuotes_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + 
toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_6); + assert.fail(message.fail_crossChainQuotes_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_6); + console.log(message.vali_crossChainQuotes_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_7); + assert.fail(message.fail_crossChainQuotes_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_7); + console.log(message.vali_crossChainQuotes_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_8); + assert.fail(message.fail_crossChainQuotes_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_8); + console.log(message.vali_crossChainQuotes_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + 
console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_9); + assert.fail(message.fail_crossChainQuotes_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_9); + console.log(message.vali_crossChainQuotes_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.invalidSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_10); + assert.fail(message.fail_crossChainQuotes_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_10); + console.log(message.vali_crossChainQuotes_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.incorrectSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_11); + assert.fail(message.fail_crossChainQuotes_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === +
constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_11); + console.log(message.vali_crossChainQuotes_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + addContext(test, message.fail_crossChainQuotes_12); + assert.fail(message.fail_crossChainQuotes_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_12); + assert.fail(message.fail_crossChainQuotes_12); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_12); + console.log(message.vali_crossChainQuotes_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_2); + assert.fail(message.fail_advanceRoutesLiFi_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_advanceRoutesLiFi_2); + console.log(message.vali_advanceRoutesLiFi_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: 
utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_3); + assert.fail(message.fail_advanceRoutesLiFi_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_2) { + addContext(test, message.vali_advanceRoutesLifi_3); + console.log(message.vali_advanceRoutesLifi_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_4); + assert.fail(message.fail_advanceRoutesLiFi_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_4); + console.log(message.vali_advanceRoutesLifi_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_5); + assert.fail(message.fail_advanceRoutesLiFi_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_5); + console.log(message.vali_advanceRoutesLifi_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the ' + + 
randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_6); + assert.fail(message.fail_advanceRoutesLiFi_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_6); + console.log(message.vali_advanceRoutesLifi_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_7); + assert.fail(message.fail_advanceRoutesLiFi_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_7); + console.log(message.vali_advanceRoutesLifi_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_8); + assert.fail(message.fail_advanceRoutesLiFi_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_8); + console.log(message.vali_advanceRoutesLifi_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_advanceRoutesLiFi_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_9); + assert.fail(message.fail_advanceRoutesLiFi_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_9); + console.log(message.vali_advanceRoutesLifi_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_10); + assert.fail(message.fail_advanceRoutesLiFi_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_10); + console.log(message.vali_advanceRoutesLifi_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/swap/optimism.spec.js b/test/specs/mainnet/swap/optimism.spec.js deleted file mode 100644 index 40e1792..0000000 --- a/test/specs/mainnet/swap/optimism.spec.js +++ /dev/null @@ -1,2206 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert 
{ type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let optimismMainNetSdk; -let optimismEtherspotWalletAddress; -let optimismNativeAddress = null; -let optimismDataService; -let runTest; - -describe('The PrimeSDK, when get cross chain quotes and get advance routes LiFi transaction details with optimism network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - // initializating sdk - try { - optimismMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.optimism_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.optimism_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - optimismMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - optimismEtherspotWalletAddress = - await optimismMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - optimismEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - optimismDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await optimismDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === optimismNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_optimismUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the optimism network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - 
exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromChainId = data.optimism_chainid; - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_optimismUSDC; - let toTokenAddress = data.tokenAddress_optimismUSDT; - let fromAmount = data.exchange_offer_value; - - offers = await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the optimism network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await 
optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_optimismUSDC; - let toTokenAddress = constants.AddressZero; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.optimism_chainid; - - offers = await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - 
xit('SMOKE: Validate the getCrossChainQuotes response with valid details on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - if (quotes.items.length > 0) { - try { - assert.isNotEmpty( - quotes.items[0].provider, - message.vali_crossChainQuotes_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.data, - message.vali_crossChainQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.to, - message.vali_crossChainQuotes_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.value, - message.vali_crossChainQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.from, - message.vali_crossChainQuotes_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].transaction.chainId, - message.vali_crossChainQuotes_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_crossChainQuotes_1); - console.log(message.vali_crossChainQuotes_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - let stepTransaction; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - if (quotes.items.length > 0) { - const quote = quotes.items[0]; // Selected the first route - stepTransaction = await optimismDataService.getStepTransaction({ - route: quote, - account: data.sender, - }); - - try { - assert.isNotEmpty( - quotes.items[0].id, - message.vali_advanceRoutesLiFi_id - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].fromChainId, - message.vali_advanceRoutesLiFi_fromChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmountUSD, - message.vali_advanceRoutesLiFi_fromAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmount, - message.vali_advanceRoutesLiFi_fromAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromToken, - message.vali_advanceRoutesLiFi_fromToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAddress, - message.vali_advanceRoutesLiFi_fromAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].toChainId, - message.vali_advanceRoutesLiFi_toChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountUSD, - message.vali_advanceRoutesLiFi_toAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmount, - message.vali_advanceRoutesLiFi_toAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountMin, - message.vali_advanceRoutesLiFi_toAmountMin - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toToken, - message.vali_advanceRoutesLiFi_toToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAddress, - message.vali_advanceRoutesLiFi_toAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].gasCostUSD, - message.vali_advanceRoutesLiFi_gasCostUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[0].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[1].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasLimit, - message.vali_stepTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasPrice, - message.vali_stepTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_advanceRoutesLiFi_1); - console.log(message.vali_advanceRoutesLiFi_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_advanceRoutesLiFi_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the optimism network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - 
); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.invalidTokenAddress_optimismUSDC; // Invalid fromTokenAddress - let toTokenAddress = data.tokenAddress_optimismUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.optimism_chainid; - - await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_2); - assert.fail(message.fail_exchangeOffers_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_4); - console.log(message.vali_exchangeOffers_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromTokenAddress details on the optimism network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let toTokenAddress = data.tokenAddress_optimismUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.optimism_chainid; - - await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - // without fromTokenAddress - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_3); - assert.fail(message.fail_exchangeOffers_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_5); - console.log(message.vali_exchangeOffers_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the optimism network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await 
optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_optimismUSDC; - let toTokenAddress = data.invalidTokenAddress_optimismUSDT; // Invalid toTokenAddress - let fromAmount = data.exchange_offer_value; - let fromChainId = data.optimism_chainid; - - await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_4); - assert.fail(message.fail_exchangeOffers_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_6); - console.log(message.vali_exchangeOffers_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without toTokenAddress details on the optimism network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_optimismUSDC; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.optimism_chainid; - - await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - // without toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_5); - assert.fail(message.fail_exchangeOffers_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_7); - console.log(message.vali_exchangeOffers_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromAmount on the optimism network', async function () { - var 
test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_optimismUSDC; - let toTokenAddress = data.tokenAddress_optimismUSDT; - let fromAmount = data.invalidValue; // invalid fromAmount - let fromChainId = data.optimism_chainid; - - await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_6); - assert.fail(message.fail_exchangeOffers_6); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_8); - console.log(message.vali_exchangeOffers_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with decimal fromAmount on the optimism network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_optimismUSDC; - let toTokenAddress = data.tokenAddress_optimismUSDT; - let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount - let fromChainId = data.optimism_chainid; - - await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_7); - assert.fail(message.fail_exchangeOffers_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_9); - console.log(message.vali_exchangeOffers_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromAmount on the optimism network', async 
function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await optimismDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_optimismUSDC; - let toTokenAddress = data.tokenAddress_optimismUSDT; - let fromChainId = data.optimism_chainid; - - await optimismDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - // without fromAmount - }); - - addContext(test, message.fail_exchangeOffers_9); - assert.fail(message.fail_exchangeOffers_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_exchangeOffers_11); - console.log(message.vali_exchangeOffers_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_2); - assert.fail(message.fail_crossChainQuotes_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_crossChainQuotes_2); - console.log(message.vali_crossChainQuotes_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_3); - 
assert.fail(message.fail_crossChainQuotes_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_crossChainQuotes_3); - console.log(message.vali_crossChainQuotes_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_4); - assert.fail(message.fail_crossChainQuotes_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_4); - console.log(message.vali_crossChainQuotes_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_5); - assert.fail(message.fail_crossChainQuotes_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_5); - console.log(message.vali_crossChainQuotes_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - 
fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_6); - assert.fail(message.fail_crossChainQuotes_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_6); - console.log(message.vali_crossChainQuotes_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_7); - assert.fail(message.fail_crossChainQuotes_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_7); - console.log(message.vali_crossChainQuotes_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_8); - assert.fail(message.fail_crossChainQuotes_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_8); - console.log(message.vali_crossChainQuotes_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await 
customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_9); - assert.fail(message.fail_crossChainQuotes_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_9); - console.log(message.vali_crossChainQuotes_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.invalidSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_10); - assert.fail(message.fail_crossChainQuotes_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_10); - assert.fail(message.vali_crossChainQuotes_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.incorrectSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_11); - assert.fail(message.fail_crossChainQuotes_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_11); - assert.fail(message.vali_crossChainQuotes_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_11); - } - } - }, data.retry); // Retry this async test up to 5 times - 
} else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - }; - - await optimismDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_12); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_12); - console.log(message.vali_crossChainQuotes_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_2); - assert.fail(fail_advanceRoutesLiFi_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_advanceRoutesLiFi_2); - console.log(message.vali_advanceRoutesLiFi_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_3); - assert.fail(fail_advanceRoutesLiFi_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_advanceRoutesLifi_3); - console.log(message.vali_advanceRoutesLifi_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_3); - } - } - }, data.retry); // 
Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_4); - assert.fail(fail_advanceRoutesLiFi_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_4); - console.log(message.vali_advanceRoutesLifi_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_5); - assert.fail(fail_advanceRoutesLiFi_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_5); - console.log(message.vali_advanceRoutesLifi_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_6); - assert.fail(fail_advanceRoutesLiFi_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_6); - 
console.log(message.vali_advanceRoutesLifi_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_7); - assert.fail(fail_advanceRoutesLiFi_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_7); - console.log(message.vali_advanceRoutesLifi_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_8); - assert.fail(fail_advanceRoutesLiFi_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_8); - console.log(message.vali_advanceRoutesLifi_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_9); - assert.fail(fail_advanceRoutesLiFi_9); - } catch (e) { - const 
errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_9); - console.log(message.vali_advanceRoutesLifi_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.optimism_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_optimismUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - }; - - await optimismDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_10); - assert.fail(fail_advanceRoutesLiFi_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_10); - console.log(message.vali_advanceRoutesLifi_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/swap/xdai.spec.js b/test/specs/mainnet/swap/xdai.spec.js deleted file mode 100644 index 30bed90..0000000 --- a/test/specs/mainnet/swap/xdai.spec.js +++ /dev/null @@ -1,2237 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let xdaiMainNetSdk; -let xdaiEtherspotWalletAddress; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; - -describe('The PrimeSDK, when get cross chain quotes and get advance routes LiFi transaction details with xdai network on the MainNet (with old wallet)', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - xdaiMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - xdaiEtherspotWalletAddress = - await xdaiMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - xdaiEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - 
message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromChainId = data.xdai_chainid; - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_value; - - offers = await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - let offers; - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = constants.AddressZero; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - offers = await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - if (offers.length > 0) { - for (let i = 0; i < offers.length; i++) { - try { - assert.isNotEmpty( - offers[i].provider, - message.vali_exchangeOffers_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].receiveAmount, - message.vali_exchangeOffers_receiveAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - offers[i].exchangeRate, - message.vali_exchangeOffers_exchangeRate - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - offers[i].transactions, - message.vali_exchangeOffers_transactions - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } else { - addContext(test, message.vali_exchangeOffers_3); - console.log(message.vali_exchangeOffers_3); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the getCrossChainQuotes response with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - if (quotes.items.length > 0) { - try { - assert.isNotEmpty( - quotes.items[0].provider, - message.vali_crossChainQuotes_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.data, - message.vali_crossChainQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - 
assert.isNotEmpty( - quotes.items[0].transaction.to, - message.vali_crossChainQuotes_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.value, - message.vali_crossChainQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].transaction.from, - message.vali_crossChainQuotes_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].transaction.chainId, - message.vali_crossChainQuotes_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_crossChainQuotes_1); - console.log(message.vali_crossChainQuotes_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - let quotes; - let stepTransaction; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - quotes = - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - if (quotes.items.length > 0) { - const quote = quotes.items[0]; // Selected the first route - stepTransaction = await xdaiDataService.getStepTransaction({ - route: quote, - account: data.sender, - }); - - try { - assert.isNotEmpty( - quotes.items[0].id, - message.vali_advanceRoutesLiFi_id - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].fromChainId, - message.vali_advanceRoutesLiFi_fromChainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmountUSD, - message.vali_advanceRoutesLiFi_fromAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAmount, - message.vali_advanceRoutesLiFi_fromAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromToken, - message.vali_advanceRoutesLiFi_fromToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].fromAddress, - message.vali_advanceRoutesLiFi_fromAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - quotes.items[0].toChainId, - message.vali_advanceRoutesLiFi_toChainId - ); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountUSD, - message.vali_advanceRoutesLiFi_toAmountUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmount, - message.vali_advanceRoutesLiFi_toAmount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAmountMin, - message.vali_advanceRoutesLiFi_toAmountMin - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toToken, - message.vali_advanceRoutesLiFi_toToken - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].toAddress, - message.vali_advanceRoutesLiFi_toAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes.items[0].gasCostUSD, - message.vali_advanceRoutesLiFi_gasCostUSD - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[0].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[0].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].transactionType, - message.vali_stepTransaction_transactionType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].data, - message.vali_stepTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].to, - message.vali_stepTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].value, - message.vali_stepTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - stepTransaction.items[1].chainId, - message.vali_stepTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasLimit, - message.vali_stepTransaction_gasLimit - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - stepTransaction.items[1].gasPrice, - message.vali_stepTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_advanceRoutesLiFi_1); - console.log(message.vali_advanceRoutesLiFi_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_advanceRoutesLiFi_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].address, - message.vali_exchangeOffers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].chainId, - message.vali_exchangeOffers_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].name, - message.vali_exchangeOffers_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].symbol, - message.vali_exchangeOffers_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - exchangeSupportedAssets.items[0].decimals, - message.vali_exchangeOffers_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - exchangeSupportedAssets.items[0].logoURI, - message.vali_exchangeOffers_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.invalidTokenAddress_xdaiUSDC; // Invalid fromTokenAddress - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_2); - assert.fail(message.fail_exchangeOffers_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - 
constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_4); - console.log(message.vali_exchangeOffers_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - // without fromTokenAddress - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_3); - assert.fail(message.fail_exchangeOffers_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_exchangeOffers_5); - console.log(message.vali_exchangeOffers_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.invalidTokenAddress_xdaiUSDT; // Invalid toTokenAddress - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: 
BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_4); - assert.fail(message.fail_exchangeOffers_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_6); - console.log(message.vali_exchangeOffers_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without toTokenAddress details on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let fromAmount = data.exchange_offer_value; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - // without toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_5); - assert.fail(message.fail_exchangeOffers_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_exchangeOffers_7); - console.log(message.vali_exchangeOffers_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with invalid fromAmount on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = 
data.invalidValue; // invalid fromAmount - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_6); - assert.fail(message.fail_exchangeOffers_6); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_8); - console.log(message.vali_exchangeOffers_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response with decimal fromAmount on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - fromAmount: BigNumber.from(fromAmount), - }); - - addContext(test, message.fail_exchangeOffers_7); - assert.fail(message.fail_exchangeOffers_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_exchangeOffers_9); - console.log(message.vali_exchangeOffers_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange offers response without fromAmount on the xdai network', async function () { - var test = this; - let exchangeSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - exchangeSupportedAssets = - await xdaiDataService.getExchangeSupportedAssets({ - page: 1, - limit: 100, - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - - try { - if (exchangeSupportedAssets.items.length > 0) { - addContext(test, message.vali_exchangeOffers_1); - console.log(message.vali_exchangeOffers_1); - } else { - addContext(test, message.vali_exchangeOffers_2); - console.error(message.vali_exchangeOffers_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - let fromAddress = data.sender; - let fromTokenAddress = 
data.tokenAddress_xdaiUSDC; - let toTokenAddress = data.tokenAddress_xdaiUSDT; - let fromChainId = data.xdai_chainid; - - await xdaiDataService.getExchangeOffers({ - fromAddress, - fromChainId, - fromTokenAddress, - toTokenAddress, - // without fromAmount - }); - - addContext(test, message.fail_exchangeOffers_9); - assert.fail(message.fail_exchangeOffers_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_exchangeOffers_11); - console.log(message.vali_exchangeOffers_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeOffers_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeOffers_insufficientBalance); - console.warn(message.exchangeOffers_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_2); - assert.fail(message.fail_crossChainQuotes_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_crossChainQuotes_2); - console.log(message.vali_crossChainQuotes_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_3); - assert.fail(message.fail_crossChainQuotes_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_crossChainQuotes_3); - console.log(message.vali_crossChainQuotes_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes 
response with invalid fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_4); - assert.fail(message.fail_crossChainQuotes_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_4); - console.log(message.vali_crossChainQuotes_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_5); - assert.fail(message.fail_crossChainQuotes_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_5); - console.log(message.vali_crossChainQuotes_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_6); - assert.fail(message.fail_crossChainQuotes_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_crossChainQuotes_6); - console.log(message.vali_crossChainQuotes_6); - } else { - console.error(e); - const eString 
= e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_7); - assert.fail(message.fail_crossChainQuotes_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_7); - console.log(message.vali_crossChainQuotes_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_8); - assert.fail(message.fail_crossChainQuotes_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_8); - console.log(message.vali_crossChainQuotes_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - fromAddress: data.sender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, 
message.fail_crossChainQuotes_9); - assert.fail(message.fail_crossChainQuotes_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_crossChainQuotes_9); - console.log(message.vali_crossChainQuotes_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.invalidSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_10); - assert.fail(message.fail_crossChainQuotes_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_10); - assert.fail(message.vali_crossChainQuotes_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.incorrectSender, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_11); - assert.fail(message.fail_crossChainQuotes_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_11); - assert.fail(message.vali_crossChainQuotes_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the xdai network', async function () { - var 
test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAddress: data.sender, - }; - - await xdaiDataService.getCrossChainQuotes(quoteRequestPayload); - - addContext(test, message.fail_crossChainQuotes_12); - assert.fail(message.fail_crossChainQuotes_12); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_crossChainQuotes_12); - console.log(message.vali_crossChainQuotes_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_crossChainQuotes_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.crossChainQuotes_insufficientBalance); - console.warn(message.crossChainQuotes_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_2); - assert.fail(fail_advanceRoutesLiFi_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_1) { - addContext(test, message.vali_advanceRoutesLiFi_2); - console.log(message.vali_advanceRoutesLiFi_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_3); - assert.fail(fail_advanceRoutesLiFi_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if (errorResponse[0].property === constant.invalid_chainid_2) { - addContext(test, message.vali_advanceRoutesLifi_3); - console.log(message.vali_advanceRoutesLifi_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - 
console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.invalidTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_4); - assert.fail(fail_advanceRoutesLiFi_4); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_4); - console.log(message.vali_advanceRoutesLifi_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.incorrectTokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_5); - assert.fail(fail_advanceRoutesLiFi_5); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, message.vali_advanceRoutesLifi_5); - console.log(message.vali_advanceRoutesLifi_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - toTokenAddress: data.tokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_6); - assert.fail(fail_advanceRoutesLiFi_6); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_2 - ) { - addContext(test, 
message.vali_advanceRoutesLifi_6); - console.log(message.vali_advanceRoutesLifi_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.invalidTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_7); - assert.fail(fail_advanceRoutesLiFi_7); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_7); - console.log(message.vali_advanceRoutesLifi_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.incorrectTokenAddress_maticUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_8); - assert.fail(fail_advanceRoutesLiFi_8); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_8); - console.log(message.vali_advanceRoutesLifi_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - fromAmount: utils.parseUnits(data.swap_value, 6), - }; - - 
await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_9); - assert.fail(fail_advanceRoutesLiFi_9); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_9); - console.log(message.vali_advanceRoutesLifi_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let quoteRequestPayload; - try { - quoteRequestPayload = { - fromAddress: data.sender, - fromChainId: data.xdai_chainid, - toChainId: data.matic_chainid, - fromTokenAddress: data.tokenAddress_xdaiUSDC, - toTokenAddress: data.tokenAddress_maticUSDC, - }; - - await xdaiDataService.getAdvanceRoutesLiFi(quoteRequestPayload); - - addContext(test, fail_advanceRoutesLiFi_10); - assert.fail(fail_advanceRoutesLiFi_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.IsBigNumberish === - constant.invalid_bignumber_3 - ) { - addContext(test, message.vali_advanceRoutesLifi_10); - console.log(message.vali_advanceRoutesLifi_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(fail_advanceRoutesLiFi_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.advanceRoutesLiFi_insufficientBalance); - console.warn(message.advanceRoutesLiFi_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transactionHistory/arbitrum.spec.js b/test/specs/mainnet/transactionHistory/arbitrum.spec.js deleted file mode 100644 index 297f7ce..0000000 --- a/test/specs/mainnet/transactionHistory/arbitrum.spec.js +++ /dev/null @@ -1,1806 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import helper from '../../../utils/helper.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let arbitrumMainNetSdk; -let arbitrumEtherspotWalletAddress; -let arbitrumNativeAddress = null; -let arbitrumDataService; -let runTest; - -describe('The PrimeSDK, when get the single transaction and multiple transaction details with arbitrum network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - arbitrumMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: 
Number(data.arbitrum_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.arbitrum_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - arbitrumMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - arbitrumEtherspotWalletAddress = - await arbitrumMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - arbitrumEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - arbitrumDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await arbitrumDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.arbitrum_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === arbitrumNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_arbitrumUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the transaction history of the native token transaction on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await arbitrumMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // get single transaction history details - let transactionHash; - let singleTransaction; - - if (!(userOpsReceipt === null)) { - try { - transactionHash = userOpsReceipt.receipt.transactionHash; - singleTransaction = await arbitrumDataService.getTransaction({ - hash: transactionHash, - chainId: Number(data.arbitrum_chainid), - }); - - try { - assert.isNumber( - singleTransaction.chainId, - message.vali_getTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.hash, - message.vali_getTransaction_hash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.nonce, - message.vali_getTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockHash, - message.vali_getTransaction_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.blockNumber, - message.vali_getTransaction_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.from, - message.vali_getTransaction_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.to, - message.vali_getTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.value, - message.vali_getTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.gasPrice, - message.vali_getTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasLimit, - message.vali_getTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.input, - message.vali_getTransaction_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - } - - try { - assert.isNotEmpty( - singleTransaction.status, - message.vali_getTransaction_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockExplorerUrl, - message.vali_getTransaction_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.transactionIndex, - message.vali_getTransaction_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasUsed, - message.vali_getTransaction_gasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].transactionIndex, - message.vali_getTransaction_log_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].blockNumber, - message.vali_getTransaction_log_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].transactionHash, - message.vali_getTransaction_log_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].address, - message.vali_getTransaction_log_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].topics, - message.vali_getTransaction_log_topics - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].data, - message.vali_getTransaction_log_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].logIndex, - message.vali_getTransaction_log_logIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].blockHash, - message.vali_getTransaction_log_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransaction_1); - } - } else { - addContext(test, message.vali_getTransaction_1); - console.log(message.vali_getTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response with random transaction in arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // Fetching historical transactions - let transactions; - let randomTransaction; - - try { - transactions = await arbitrumDataService.getTransactions({ - chainId: Number(data.arbitrum_chainid), - account: data.sender, - }); - - randomTransaction = - 
Math.floor(Math.random() * (transactions.transactions.length - 1)) + - 1; - - try { - assert.isNumber( - transactions.transactions[randomTransaction].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString 
= e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the native transaction in arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await arbitrumMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); 
- assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await arbitrumDataService.getTransactions({ - chainId: Number(data.arbitrum_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - 
} catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - 'The preVerificationGas value is empty in the get transactions response.' - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].from, - message.vali_getTransactions_nativeTransfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].to, - message.vali_getTransactions_nativeTransfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].value, - message.vali_getTransactions_nativeTransfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].asset, - message.vali_getTransactions_nativeTransfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].address, - message.vali_getTransactions_nativeTransfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nativeTransfers[0].decimal, - message.vali_getTransactions_nativeTransfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].data, - message.vali_getTransactions_nativeTransfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - 
console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the erc20 transaction in arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_2); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_3); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_4); - } - - // get transfer From encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_5); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await arbitrumMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await arbitrumMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString 
= e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await arbitrumDataService.getTransactions({ - chainId: Number(data.arbitrum_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - 
transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].from, - message.vali_getTransactions_erc20Transfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].to, - message.vali_getTransactions_erc20Transfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].value, - message.vali_getTransactions_erc20Transfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].asset, - message.vali_getTransactions_erc20Transfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].address, - message.vali_getTransactions_erc20Transfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].erc20Transfers[0].decimal, - message.vali_getTransactions_erc20Transfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].data, - message.vali_getTransactions_erc20Transfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, 
message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response with invalid hash on arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - let transaction; - try { - transaction = await arbitrumDataService.getTransaction({ - hash: data.incorrect_hash, // Incorrect Transaction Hash - chainId: Number(data.arbitrum_chainid), - }); - - if (transaction === null || Object.keys(transaction).length === 0) { - addContext(test, message.vali_getTransactions_2); - console.log(message.vali_getTransactions_2); - } else { - addContext(test, message.fail_getTransactions_6); - assert.fail(message.fail_getTransactions_6); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - try { - await arbitrumDataService.getTransaction({ - hash: data.invalid_hash, // Invalid Transaction Hash - chainId: Number(data.arbitrum_chainid), - }); - - addContext(test, message.fail_getTransactions_7); - assert.fail(message.fail_getTransactions_7); - } catch (e) { - if (e.errors[0].constraints.isHex === constant.hash_32) { - addContext(test, message.vali_getTransactions_3); - console.log(message.vali_getTransactions_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid chainid in arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let transactions = await arbitrumDataService.getTransactions({ - chainId: Number(data.invalid_arbitrum_chainid), - account: data.sender, - }); - - if (transactions.transactions.length === 0) { - addContext(test, message.vali_getTransactions_4); - console.log(message.vali_getTransactions_4); - } else { - addContext(test, message.fail_getTransactions_8); - assert.fail(message.fail_getTransactions_8); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid account in arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let a = 
await arbitrumDataService.getTransactions({ - chainId: Number(data.arbitrum_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_getTransactions_10); - assert.fail(message.fail_getTransactions_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_6); - console.log(message.vali_getTransactions_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with incorrect account in arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await arbitrumDataService.getTransactions({ - chainId: Number(data.arbitrum_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_getTransactions_11); - assert.fail(message.fail_getTransactions_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_7); - console.log(message.vali_getTransactions_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transactionHistory/matic.spec.js b/test/specs/mainnet/transactionHistory/matic.spec.js deleted file mode 100644 index dfe0e53..0000000 --- a/test/specs/mainnet/transactionHistory/matic.spec.js +++ /dev/null @@ -1,1806 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import helper from '../../../utils/helper.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let maticMainNetSdk; -let maticEtherspotWalletAddress; -let maticNativeAddress = null; -let maticDataService; -let runTest; - -describe('The PrimeSDK, when get the single transaction and multiple transaction details with matic network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - maticMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.matic_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.matic_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - maticMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - maticEtherspotWalletAddress = - await maticMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - maticEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - maticDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await maticDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.matic_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === maticNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_maticUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the transaction history of the native token transaction on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - 
console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await maticMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // get single transaction history details - let transactionHash; - let singleTransaction; - - if (!(userOpsReceipt === null)) { - try { - transactionHash = userOpsReceipt.receipt.transactionHash; - singleTransaction = await maticDataService.getTransaction({ - hash: transactionHash, - chainId: Number(data.matic_chainid), - }); - - try { - assert.isNumber( - singleTransaction.chainId, - message.vali_getTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.hash, - message.vali_getTransaction_hash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.nonce, - message.vali_getTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockHash, - message.vali_getTransaction_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.blockNumber, - message.vali_getTransaction_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.from, - message.vali_getTransaction_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.to, - message.vali_getTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.value, - message.vali_getTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.gasPrice, - message.vali_getTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasLimit, - message.vali_getTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.input, - message.vali_getTransaction_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.status, - message.vali_getTransaction_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockExplorerUrl, - message.vali_getTransaction_blockExplorerUrl 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.transactionIndex, - message.vali_getTransaction_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasUsed, - message.vali_getTransaction_gasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].transactionIndex, - message.vali_getTransaction_log_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].blockNumber, - message.vali_getTransaction_log_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].transactionHash, - message.vali_getTransaction_log_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].address, - message.vali_getTransaction_log_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].topics, - message.vali_getTransaction_log_topics - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].data, - message.vali_getTransaction_log_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].logIndex, - message.vali_getTransaction_log_logIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].blockHash, - message.vali_getTransaction_log_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransaction_1); - } - } else { - addContext(test, message.vali_getTransaction_1); - console.log(message.vali_getTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response with random transaction in matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // Fetching historical transactions - let transactions; - let randomTransaction; - - try { - transactions = await maticDataService.getTransactions({ - chainId: Number(data.matic_chainid), - account: data.sender, - }); - - randomTransaction = - Math.floor(Math.random() * (transactions.transactions.length - 1)) + - 1; - - try { - assert.isNumber( - transactions.transactions[randomTransaction].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
transactions.transactions[randomTransaction].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - 
assert.isNotEmpty( - transactions.transactions[randomTransaction].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the native transaction in matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await maticMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await maticDataService.getTransactions({ - chainId: Number(data.matic_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if 
(userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - 
assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - 'The preVerificationGas value is empty in the get transactions response.' - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].from, - message.vali_getTransactions_nativeTransfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].to, - message.vali_getTransactions_nativeTransfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].value, - message.vali_getTransactions_nativeTransfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].asset, - message.vali_getTransactions_nativeTransfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].address, - message.vali_getTransactions_nativeTransfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nativeTransfers[0].decimal, - message.vali_getTransactions_nativeTransfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].data, - message.vali_getTransactions_nativeTransfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - 
} - }); - - it('SMOKE: Validate the get transactions history response of the erc20 transaction in matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_2); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_3); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_4); - } - - // get transfer From encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_5); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await maticMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await maticMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await maticDataService.getTransactions({ - chainId: Number(data.matic_chainid), - account: data.sender, - page: 1, - limit: 10, - 
}); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { 
- assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].from, - message.vali_getTransactions_erc20Transfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].to, - message.vali_getTransactions_erc20Transfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].value, - message.vali_getTransactions_erc20Transfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].asset, - message.vali_getTransactions_erc20Transfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].address, - message.vali_getTransactions_erc20Transfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].erc20Transfers[0].decimal, - message.vali_getTransactions_erc20Transfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].data, - message.vali_getTransactions_erc20Transfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate 
the get transaction history response with invalid hash on matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - let transaction; - try { - transaction = await maticDataService.getTransaction({ - hash: data.incorrect_hash, // Incorrect Transaction Hash - chainId: Number(data.matic_chainid), - }); - - if (transaction === null || Object.keys(transaction).length === 0) { - addContext(test, message.vali_getTransactions_2); - console.log(message.vali_getTransactions_2); - } else { - addContext(test, message.fail_getTransactions_6); - assert.fail(message.fail_getTransactions_6); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - try { - await maticDataService.getTransaction({ - hash: data.invalid_hash, // Invalid Transaction Hash - chainId: Number(data.matic_chainid), - }); - - addContext(test, message.fail_getTransactions_7); - assert.fail(message.fail_getTransactions_7); - } catch (e) { - if (e.errors[0].constraints.isHex === constant.hash_32) { - addContext(test, message.vali_getTransactions_3); - console.log(message.vali_getTransactions_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid chainid in matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let transactions = await maticDataService.getTransactions({ - chainId: Number(data.invalid_matic_chainid), - account: data.sender, - }); - - if (transactions.transactions.length === 0) { - addContext(test, message.vali_getTransactions_4); - console.log(message.vali_getTransactions_4); - } else { - addContext(test, message.fail_getTransactions_8); - assert.fail(message.fail_getTransactions_8); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid account in matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let a = await maticDataService.getTransactions({ - chainId: Number(data.matic_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_getTransactions_10); - assert.fail(message.fail_getTransactions_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_6); - 
console.log(message.vali_getTransactions_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with incorrect account in matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await maticDataService.getTransactions({ - chainId: Number(data.matic_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_getTransactions_11); - assert.fail(message.fail_getTransactions_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_7); - console.log(message.vali_getTransactions_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transactionHistory/newWallet_transactionHistory.spec.js b/test/specs/mainnet/transactionHistory/newWallet_transactionHistory.spec.js new file mode 100644 index 0000000..9720124 --- /dev/null +++ b/test/specs/mainnet/transactionHistory/newWallet_transactionHistory.spec.js @@ -0,0 +1,1766 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers, utils, providers } from 'ethers'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import helper from '../../../utils/helper.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_mainnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let mainnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Validate the single transaction and multiple transaction details on the MainNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the transaction history of the native token transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_getTransactionHash_1); + } + + // get single transaction history details + let transactionHash; + let singleTransaction; + + if (!(userOpsReceipt === null)) { + try { + transactionHash = userOpsReceipt.receipt.transactionHash; + singleTransaction = await dataService.getTransaction({ + hash: transactionHash, + chainId: Number(randomChainId), + }); + + try { + assert.isNumber( + singleTransaction.chainId, + message.vali_getTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.hash, + message.vali_getTransaction_hash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.nonce, + message.vali_getTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockHash, + message.vali_getTransaction_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.blockNumber, + message.vali_getTransaction_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.from, + message.vali_getTransaction_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.to, + message.vali_getTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.value, + message.vali_getTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.gasPrice, + message.vali_getTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasLimit, + message.vali_getTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.input, + message.vali_getTransaction_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.status, + message.vali_getTransaction_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockExplorerUrl, + message.vali_getTransaction_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.transactionIndex, + message.vali_getTransaction_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasUsed, + message.vali_getTransaction_gasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].transactionIndex, + 
message.vali_getTransaction_log_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].blockNumber, + message.vali_getTransaction_log_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].transactionHash, + message.vali_getTransaction_log_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].address, + message.vali_getTransaction_log_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].topics, + message.vali_getTransaction_log_topics + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].data, + message.vali_getTransaction_log_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].logIndex, + message.vali_getTransaction_log_logIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].blockHash, + message.vali_getTransaction_log_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransaction_1); + } + } else { + addContext(test, message.vali_getTransaction_1); + console.log(message.vali_getTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response with random transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // Fetching historical transactions + let transactions; + let randomTransaction; + + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + }); + + randomTransaction = + Math.floor( + Math.random() * (transactions.transactions.length - 1) + ) + 1; + + try { + assert.isNumber( + transactions.transactions[randomTransaction].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].userOpHash, + 
message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction] + .verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + 
transactions.transactions[randomTransaction] + .maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the native transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for the 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].blockNumber, + message.vali_getTransactions_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].target, + message.vali_getTransactions_target + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + 'The preVerificationGas value is empty in the get transactions response.' + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].from, + message.vali_getTransactions_nativeTransfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].to, + message.vali_getTransactions_nativeTransfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].value, + message.vali_getTransactions_nativeTransfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].asset, + message.vali_getTransactions_nativeTransfers_asset + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].address, + message.vali_getTransactions_nativeTransfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nativeTransfers[0].decimal, + message.vali_getTransactions_nativeTransfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].data, + message.vali_getTransactions_nativeTransfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the erc20 transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new 
ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_2); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_3); + } + + // get transfer From encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_5); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for the 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas 
+ ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].from, + message.vali_getTransactions_erc20Transfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].to, + message.vali_getTransactions_erc20Transfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].value, + message.vali_getTransactions_erc20Transfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].address, + message.vali_getTransactions_erc20Transfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].erc20Transfers[0].decimal, + message.vali_getTransactions_erc20Transfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].data, + message.vali_getTransactions_erc20Transfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction history response with invalid hash on ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + let transaction; + try { + transaction = await dataService.getTransaction({ + hash: data.incorrect_hash, // Incorrect Transaction Hash + chainId: Number(randomChainId), + }); + + if (transaction === null || Object.keys(transaction).length === 0) { + addContext(test, message.vali_getTransactions_2); + console.log(message.vali_getTransactions_2); + } else { + addContext(test, message.fail_getTransactions_6); + assert.fail(message.fail_getTransactions_6); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_6); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on ' + + randomChainName + + ' network', + async function 
() { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + try { + await dataService.getTransaction({ + hash: data.invalid_hash, // Invalid Transaction Hash + chainId: Number(randomChainId), + }); + + addContext(test, message.fail_getTransactions_7); + assert.fail(message.fail_getTransactions_7); + } catch (e) { + if (e.errors[0].constraints.isHex === constant.hash_32) { + addContext(test, message.vali_getTransactions_3); + console.log(message.vali_getTransactions_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with invalid account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let a = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_getTransactions_10); + assert.fail(message.fail_getTransactions_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_6); + console.log(message.vali_getTransactions_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with incorrect account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_getTransactions_11); + assert.fail(message.fail_getTransactions_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_7); + console.log(message.vali_getTransactions_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/transactionHistory/newWallet_xdai.spec.js b/test/specs/mainnet/transactionHistory/newWallet_xdai.spec.js deleted file mode 100644 index 6ea251b..0000000 --- a/test/specs/mainnet/transactionHistory/newWallet_xdai.spec.js +++ /dev/null @@ -1,1717 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 
'ethers'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import helper from '../../../utils/helper.js'; -import testUtils from '../../../utils/testUtils.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; -import { dirname } from 'path'; -import { fileURLToPath } from 'url'; -import fs from 'fs'; -import path from 'path'; - -let xdaiMainNetSdk; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; -const __dirname = dirname(fileURLToPath(import.meta.url)); - -describe('The PrimeSDK, when get the single transaction and multiple transaction details with xdai network on the MainNet (with new wallet)', function () { - before(async function () { - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: sharedState.newPrivateKey }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // initializating Data service... - try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - }, data.retry); // Retry this async test up to 5 times - }); - - beforeEach(async function () { - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }); - - it('SMOKE: Validate the transaction history of the native token transaction on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await 
xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // get single transaction history details - let transactionHash; - let singleTransaction; - - if (!(userOpsReceipt === null)) { - try { - transactionHash = userOpsReceipt.receipt.transactionHash; - singleTransaction = await xdaiDataService.getTransaction({ - hash: transactionHash, - chainId: Number(data.xdai_chainid), - }); - - try { - assert.isNumber( - singleTransaction.chainId, - message.vali_getTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.hash, - message.vali_getTransaction_hash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.nonce, - message.vali_getTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockHash, - message.vali_getTransaction_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.blockNumber, - message.vali_getTransaction_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.from, - message.vali_getTransaction_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.to, - message.vali_getTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.value, - message.vali_getTransaction_value - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.gasPrice, - message.vali_getTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasLimit, - message.vali_getTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.input, - message.vali_getTransaction_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.status, - message.vali_getTransaction_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockExplorerUrl, - message.vali_getTransaction_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.transactionIndex, - message.vali_getTransaction_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasUsed, - message.vali_getTransaction_gasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].transactionIndex, - message.vali_getTransaction_log_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].blockNumber, - message.vali_getTransaction_log_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].transactionHash, - message.vali_getTransaction_log_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].address, - message.vali_getTransaction_log_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].topics, - message.vali_getTransaction_log_topics - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].data, - message.vali_getTransaction_log_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].logIndex, - message.vali_getTransaction_log_logIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].blockHash, - message.vali_getTransaction_log_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransaction_1); - } - } else { - addContext(test, message.vali_getTransaction_1); - 
console.log(message.vali_getTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response with random transaction in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // Fetching historical transactions - let transactions; - let randomTransaction; - - try { - transactions = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.sender, - }); - - randomTransaction = - Math.floor(Math.random() * (transactions.transactions.length - 1)) + - 1; - - try { - assert.isNumber( - transactions.transactions[randomTransaction].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].input, - message.vali_getTransactions_input 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the native transaction in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let 
uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - 'The preVerificationGas value is empty in the get transactions response.' - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].from, - message.vali_getTransactions_nativeTransfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].to, - message.vali_getTransactions_nativeTransfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].value, - message.vali_getTransactions_nativeTransfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].asset, - message.vali_getTransactions_nativeTransfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
transactions.transactions[0].nativeTransfers[0].address, - message.vali_getTransactions_nativeTransfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nativeTransfers[0].decimal, - message.vali_getTransactions_nativeTransfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].data, - message.vali_getTransactions_nativeTransfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the erc20 transaction in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_2); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_3); - } - - // get transfer From encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_5); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - 
console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].from, - message.vali_getTransactions_erc20Transfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].to, - message.vali_getTransactions_erc20Transfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].value, - message.vali_getTransactions_erc20Transfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].address, - message.vali_getTransactions_erc20Transfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].erc20Transfers[0].decimal, - message.vali_getTransactions_erc20Transfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].data, - message.vali_getTransactions_erc20Transfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response with invalid hash on xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - let transaction; - try { - transaction = await xdaiDataService.getTransaction({ - hash: data.incorrect_hash, // Incorrect Transaction Hash - chainId: Number(data.xdai_chainid), - }); - - if (transaction === null || Object.keys(transaction).length === 0) { - addContext(test, message.vali_getTransactions_2); - console.log(message.vali_getTransactions_2); - } else { - addContext(test, message.fail_getTransactions_6); - assert.fail(message.fail_getTransactions_6); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - try { - await xdaiDataService.getTransaction({ - hash: data.invalid_hash, // Invalid Transaction Hash - chainId: Number(data.xdai_chainid), - }); - - addContext(test, message.fail_getTransactions_7); - assert.fail(message.fail_getTransactions_7); - } catch (e) { - if (e.errors[0].constraints.isHex === constant.hash_32) { - addContext(test, message.vali_getTransactions_3); - console.log(message.vali_getTransactions_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid account in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let a = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_getTransactions_10); - assert.fail(message.fail_getTransactions_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_6); - console.log(message.vali_getTransactions_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response 
with incorrect account in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_getTransactions_11); - assert.fail(message.fail_getTransactions_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_7); - console.log(message.vali_getTransactions_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transactionHistory/oldWallet_transactionHistory.spec.js b/test/specs/mainnet/transactionHistory/oldWallet_transactionHistory.spec.js new file mode 100644 index 0000000..61fd363 --- /dev/null +++ b/test/specs/mainnet/transactionHistory/oldWallet_transactionHistory.spec.js @@ -0,0 +1,1798 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers, utils, providers } from 'ethers'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import helper from '../../../utils/helper.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_mainnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let mainnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Validate the single transaction and multiple transaction details on the MainNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + mainnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await mainnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = 
e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the transaction history of the native token transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await 
mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // get single transaction history details + let transactionHash; + let singleTransaction; + + if (!(userOpsReceipt === null)) { + try { + transactionHash = userOpsReceipt.receipt.transactionHash; + singleTransaction = await dataService.getTransaction({ + hash: transactionHash, + chainId: Number(randomChainId), + }); + + try { + assert.isNumber( + singleTransaction.chainId, + message.vali_getTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.hash, + message.vali_getTransaction_hash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.nonce, + message.vali_getTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockHash, + message.vali_getTransaction_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.blockNumber, + message.vali_getTransaction_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.from, + message.vali_getTransaction_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.to, + message.vali_getTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.value, + message.vali_getTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.gasPrice, + message.vali_getTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasLimit, + message.vali_getTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.input, + message.vali_getTransaction_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.status, + message.vali_getTransaction_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockExplorerUrl, + message.vali_getTransaction_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.transactionIndex, + message.vali_getTransaction_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasUsed, + message.vali_getTransaction_gasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].transactionIndex, + message.vali_getTransaction_log_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].blockNumber, + message.vali_getTransaction_log_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].transactionHash, + message.vali_getTransaction_log_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].address, + message.vali_getTransaction_log_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].topics, + message.vali_getTransaction_log_topics + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].data, + message.vali_getTransaction_log_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].logIndex, + message.vali_getTransaction_log_logIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].blockHash, + message.vali_getTransaction_log_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransaction_1); + } + } else { + addContext(test, message.vali_getTransaction_1); + console.log(message.vali_getTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response with random transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // Fetching historical transactions + let transactions; + let randomTransaction; + + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + }); + + randomTransaction = + Math.floor( + Math.random() * (transactions.transactions.length - 1) + ) + 1; + + try { + assert.isNumber( + transactions.transactions[randomTransaction].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction] + .verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction] + .maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the native transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for the 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].blockNumber, + 
message.vali_getTransactions_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].target, + message.vali_getTransactions_target + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + 
message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + 'The preVerificationGas value is empty in the get transactions response.' + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].from, + message.vali_getTransactions_nativeTransfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].to, + message.vali_getTransactions_nativeTransfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].value, + message.vali_getTransactions_nativeTransfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].asset, + message.vali_getTransactions_nativeTransfers_asset + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].address, + message.vali_getTransactions_nativeTransfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nativeTransfers[0].decimal, + message.vali_getTransactions_nativeTransfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].data, + message.vali_getTransactions_nativeTransfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the erc20 transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await 
customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_2); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_3); + } + + // get transfer From encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_5); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for the 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].from, + message.vali_getTransactions_erc20Transfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].to, + message.vali_getTransactions_erc20Transfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].value, + message.vali_getTransactions_erc20Transfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].address, + message.vali_getTransactions_erc20Transfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].erc20Transfers[0].decimal, + message.vali_getTransactions_erc20Transfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].data, + message.vali_getTransactions_erc20Transfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction history response with invalid hash on ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + let transaction; + try { + transaction = await dataService.getTransaction({ + hash: data.incorrect_hash, // Incorrect Transaction Hash + chainId: Number(randomChainId), + }); + + if (transaction === null || Object.keys(transaction).length === 0) { + addContext(test, message.vali_getTransactions_2); + console.log(message.vali_getTransactions_2); + } else { + addContext(test, message.fail_getTransactions_6); + assert.fail(message.fail_getTransactions_6); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_6); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } 
+ ); + + it( + 'REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + try { + await dataService.getTransaction({ + hash: data.invalid_hash, // Invalid Transaction Hash + chainId: Number(randomChainId), + }); + + addContext(test, message.fail_getTransactions_7); + assert.fail(message.fail_getTransactions_7); + } catch (e) { + if (e.errors[0].constraints.isHex === constant.hash_32) { + addContext(test, message.vali_getTransactions_3); + console.log(message.vali_getTransactions_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with invalid account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let a = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_getTransactions_10); + assert.fail(message.fail_getTransactions_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_6); + console.log(message.vali_getTransactions_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with incorrect account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_getTransactions_11); + assert.fail(message.fail_getTransactions_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_7); + console.log(message.vali_getTransactions_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/transactionHistory/optimism.spec.js b/test/specs/mainnet/transactionHistory/optimism.spec.js deleted file mode 100644 index 27cefe2..0000000 --- a/test/specs/mainnet/transactionHistory/optimism.spec.js +++ /dev/null @@ -1,1806 +0,0 @@ -import * as dotenv from 'dotenv'; 
-dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import helper from '../../../utils/helper.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let optimismMainNetSdk; -let optimismEtherspotWalletAddress; -let optimismNativeAddress = null; -let optimismDataService; -let runTest; - -describe('The PrimeSDK, when get the single transaction and multiple transaction details with optimism network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - optimismMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.optimism_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.optimism_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - optimismMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - optimismEtherspotWalletAddress = - await optimismMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - optimismEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - optimismDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await optimismDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.optimism_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === optimismNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_optimismUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the transaction history of the native token transaction on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await optimismMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // get single transaction history details - let 
transactionHash; - let singleTransaction; - - if (!(userOpsReceipt === null)) { - try { - transactionHash = userOpsReceipt.receipt.transactionHash; - singleTransaction = await optimismDataService.getTransaction({ - hash: transactionHash, - chainId: Number(data.optimism_chainid), - }); - - try { - assert.isNumber( - singleTransaction.chainId, - message.vali_getTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.hash, - message.vali_getTransaction_hash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.nonce, - message.vali_getTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockHash, - message.vali_getTransaction_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.blockNumber, - message.vali_getTransaction_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.from, - message.vali_getTransaction_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.to, - message.vali_getTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.value, - message.vali_getTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.gasPrice, - message.vali_getTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasLimit, - message.vali_getTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.input, - message.vali_getTransaction_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.status, - message.vali_getTransaction_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockExplorerUrl, - message.vali_getTransaction_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.transactionIndex, - message.vali_getTransaction_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasUsed, - message.vali_getTransaction_gasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].transactionIndex, - message.vali_getTransaction_log_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].blockNumber, - message.vali_getTransaction_log_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].transactionHash, - message.vali_getTransaction_log_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].address, - message.vali_getTransaction_log_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].topics, - message.vali_getTransaction_log_topics - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].data, - message.vali_getTransaction_log_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].logIndex, - message.vali_getTransaction_log_logIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].blockHash, - message.vali_getTransaction_log_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransaction_1); - } - } else { - addContext(test, message.vali_getTransaction_1); - console.log(message.vali_getTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response with random transaction in optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // Fetching historical transactions - let transactions; - let randomTransaction; - - try { - transactions = await optimismDataService.getTransactions({ - chainId: Number(data.optimism_chainid), - account: data.sender, - }); - - randomTransaction = - Math.floor(Math.random() * (transactions.transactions.length - 1)) + - 1; - - try { - assert.isNumber( - transactions.transactions[randomTransaction].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - 
transactions.transactions[randomTransaction].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the native transaction in optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await optimismMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await optimismDataService.getTransactions({ - chainId: Number(data.optimism_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - 'The preVerificationGas value is empty in the get transactions response.' 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].from, - message.vali_getTransactions_nativeTransfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].to, - message.vali_getTransactions_nativeTransfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].value, - message.vali_getTransactions_nativeTransfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].asset, - message.vali_getTransactions_nativeTransfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].address, - message.vali_getTransactions_nativeTransfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nativeTransfers[0].decimal, - message.vali_getTransactions_nativeTransfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].data, - message.vali_getTransactions_nativeTransfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the erc20 transaction in optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_2); - } - - // get erc20 Contract Interface - let 
erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_3); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_4); - } - - // get transfer From encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_5); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await optimismMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await optimismMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await optimismDataService.getTransactions({ - chainId: Number(data.optimism_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - 
message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - 
message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].from, - message.vali_getTransactions_erc20Transfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].to, - message.vali_getTransactions_erc20Transfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].value, - message.vali_getTransactions_erc20Transfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].asset, - message.vali_getTransactions_erc20Transfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].address, - message.vali_getTransactions_erc20Transfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].erc20Transfers[0].decimal, - message.vali_getTransactions_erc20Transfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].data, - message.vali_getTransactions_erc20Transfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response with invalid hash on optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - let transaction; - try { - transaction = await optimismDataService.getTransaction({ - hash: data.incorrect_hash, // Incorrect Transaction Hash - chainId: Number(data.optimism_chainid), - }); - - if (transaction === null || Object.keys(transaction).length === 0) { - addContext(test, message.vali_getTransactions_2); - 
console.log(message.vali_getTransactions_2); - } else { - addContext(test, message.fail_getTransactions_6); - assert.fail(message.fail_getTransactions_6); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - try { - await optimismDataService.getTransaction({ - hash: data.invalid_hash, // Invalid Transaction Hash - chainId: Number(data.optimism_chainid), - }); - - addContext(test, message.fail_getTransactions_7); - assert.fail(message.fail_getTransactions_7); - } catch (e) { - if (e.errors[0].constraints.isHex === constant.hash_32) { - addContext(test, message.vali_getTransactions_3); - console.log(message.vali_getTransactions_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid chainid in optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let transactions = await optimismDataService.getTransactions({ - chainId: Number(data.invalid_optimism_chainid), - account: data.sender, - }); - - if (transactions.transactions.length === 0) { - addContext(test, message.vali_getTransactions_4); - console.log(message.vali_getTransactions_4); - } else { - addContext(test, message.fail_getTransactions_8); - assert.fail(message.fail_getTransactions_8); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid account in optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let a = await optimismDataService.getTransactions({ - chainId: Number(data.optimism_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_getTransactions_10); - assert.fail(message.fail_getTransactions_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_6); - console.log(message.vali_getTransactions_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with incorrect account in optimism network', async function () { - var test = this; - if (runTest) { - 
await customRetryAsync(async function () { - try { - await optimismDataService.getTransactions({ - chainId: Number(data.optimism_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_getTransactions_11); - assert.fail(message.fail_getTransactions_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_7); - console.log(message.vali_getTransactions_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transactionHistory/xdai.spec.js b/test/specs/mainnet/transactionHistory/xdai.spec.js deleted file mode 100644 index 609514a..0000000 --- a/test/specs/mainnet/transactionHistory/xdai.spec.js +++ /dev/null @@ -1,1743 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import helper from '../../../utils/helper.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let xdaiMainNetSdk; -let xdaiEtherspotWalletAddress; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; - -describe('The PrimeSDK, when get the single transaction and multiple transaction details with xdai network on the MainNet (with old wallet)', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - xdaiMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - xdaiEtherspotWalletAddress = - await xdaiMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - xdaiEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the transaction history of the native token transaction on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // get single transaction history details - let transactionHash; - let singleTransaction; - - if 
(!(userOpsReceipt === null)) { - try { - transactionHash = userOpsReceipt.receipt.transactionHash; - singleTransaction = await xdaiDataService.getTransaction({ - hash: transactionHash, - chainId: Number(data.xdai_chainid), - }); - - try { - assert.isNumber( - singleTransaction.chainId, - message.vali_getTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.hash, - message.vali_getTransaction_hash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.nonce, - message.vali_getTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockHash, - message.vali_getTransaction_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.blockNumber, - message.vali_getTransaction_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.from, - message.vali_getTransaction_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.to, - message.vali_getTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.value, - message.vali_getTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.gasPrice, - message.vali_getTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasLimit, - message.vali_getTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.input, - message.vali_getTransaction_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.status, - message.vali_getTransaction_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockExplorerUrl, - message.vali_getTransaction_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.transactionIndex, - message.vali_getTransaction_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasUsed, - message.vali_getTransaction_gasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].transactionIndex, - message.vali_getTransaction_log_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( 
- singleTransaction.logs[0].blockNumber, - message.vali_getTransaction_log_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].transactionHash, - message.vali_getTransaction_log_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].address, - message.vali_getTransaction_log_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].topics, - message.vali_getTransaction_log_topics - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].data, - message.vali_getTransaction_log_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].logIndex, - message.vali_getTransaction_log_logIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].blockHash, - message.vali_getTransaction_log_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransaction_1); - } - } else { - addContext(test, message.vali_getTransaction_1); - console.log(message.vali_getTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response with random transaction in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // Fetching historical transactions - let transactions; - let randomTransaction; - - try { - transactions = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.sender, - }); - - randomTransaction = - Math.floor(Math.random() * (transactions.transactions.length - 1)) + - 1; - - try { - assert.isNumber( - transactions.transactions[randomTransaction].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - 
transactions.transactions[randomTransaction].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the native transaction in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - 
try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - 'The preVerificationGas value is empty in the get transactions response.' 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].from, - message.vali_getTransactions_nativeTransfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].to, - message.vali_getTransactions_nativeTransfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].value, - message.vali_getTransactions_nativeTransfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].asset, - message.vali_getTransactions_nativeTransfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].address, - message.vali_getTransactions_nativeTransfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nativeTransfers[0].decimal, - message.vali_getTransactions_nativeTransfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].data, - message.vali_getTransactions_nativeTransfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the erc20 transaction in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
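// NOTE: the ERC-20 history check below drives a real token transfer before querying
// the data service: the provider created above feeds an ethers.Contract built from
// ERC20_ABI, interface.encodeFunctionData('transfer', ...) produces the calldata, and
// that calldata is batched, estimated and sent before getTransactions is called. A
// condensed sketch of the encoding step, using values from this spec's test data:
//
//   const erc20Instance = new ethers.Contract(
//     data.tokenAddress_xdaiUSDC,
//     ERC20_ABI,
//     provider
//   );
//   const transactionData = erc20Instance.interface.encodeFunctionData('transfer', [
//     data.recipient,
//     ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal),
//   ]);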
assert.fail(message.fail_getTransactions_2); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_3); - } - - // get transfer From encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_5); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { 
- assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].from, - message.vali_getTransactions_erc20Transfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].to, - message.vali_getTransactions_erc20Transfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].value, - message.vali_getTransactions_erc20Transfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].address, - message.vali_getTransactions_erc20Transfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].erc20Transfers[0].decimal, - message.vali_getTransactions_erc20Transfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].data, - message.vali_getTransactions_erc20Transfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response with invalid hash on xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - let transaction; - try { - transaction = await xdaiDataService.getTransaction({ - hash: data.incorrect_hash, // Incorrect Transaction Hash - chainId: Number(data.xdai_chainid), - }); - - if (transaction === null || Object.keys(transaction).length === 0) { - addContext(test, message.vali_getTransactions_2); - console.log(message.vali_getTransactions_2); - } else { - addContext(test, message.fail_getTransactions_6); - assert.fail(message.fail_getTransactions_6); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - try { - await xdaiDataService.getTransaction({ 
- hash: data.invalid_hash, // Invalid Transaction Hash - chainId: Number(data.xdai_chainid), - }); - - addContext(test, message.fail_getTransactions_7); - assert.fail(message.fail_getTransactions_7); - } catch (e) { - if (e.errors[0].constraints.isHex === constant.hash_32) { - addContext(test, message.vali_getTransactions_3); - console.log(message.vali_getTransactions_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid account in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let a = await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_getTransactions_10); - assert.fail(message.fail_getTransactions_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_6); - console.log(message.vali_getTransactions_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with incorrect account in xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await xdaiDataService.getTransactions({ - chainId: Number(data.xdai_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_getTransactions_11); - assert.fail(message.fail_getTransactions_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_7); - console.log(message.vali_getTransactions_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transferringFunds/arbitrum.spec.js b/test/specs/mainnet/transferringFunds/arbitrum.spec.js deleted file mode 100644 index d21be54..0000000 --- a/test/specs/mainnet/transferringFunds/arbitrum.spec.js +++ /dev/null @@ -1,3687 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import 
helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import abi from '../../../data/nftabi.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let arbitrumMainNetSdk; -let arbitrumEtherspotWalletAddress; -let arbitrumNativeAddress = null; -let arbitrumDataService; -let runTest; - -describe('The PrimeSDK, when transfer a token with arbitrum network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - arbitrumMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.arbitrum_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.arbitrum_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - arbitrumMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - arbitrumEtherspotWalletAddress = - await arbitrumMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - arbitrumEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
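// NOTE: the DataUtils instance constructed just below is the data service this spec
// uses for balance and history look-ups; it only needs the data API key. Both calls
// in this sketch appear verbatim later in this before-hook:
//
//   arbitrumDataService = new DataUtils(process.env.DATA_API_KEY);
//   const output = await arbitrumDataService.getAccountBalances({
//     account: data.sender,
//     chainId: data.arbitrum_chainid,
//   });
//
// The resulting native and USDC balances gate the whole suite through the `runTest`
// flag, so every following test skips itself when the wallet is underfunded.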
- try { - arbitrumDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await arbitrumDataService.getAccountBalances({ - account: data.sender, - chainId: data.arbitrum_chainid, - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === arbitrumNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_arbitrumUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token with valid details on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await arbitrumMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC20 token with valid details on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - - try { - assert.isTrue( - provider._isProvider, - message.vali_erc20Transfer_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
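// NOTE: unlike the hard-coded decimal count used in some of the other specs in this
// patch, the ERC-20 transfer below asks the token contract for its decimals and scales
// the amount with them. A minimal sketch of that step, assuming ethers v5 where
// erc20Instance.functions.decimals() resolves to a one-element Result:
//
//   const [decimals] = await erc20Instance.functions.decimals();
//   const transactionData = erc20Instance.interface.encodeFunctionData('transfer', [
//     data.recipient,
//     ethers.utils.parseUnits(data.erc20_value, decimals),
//   ]);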
assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - - try { - assert.isNotEmpty(decimals, message.vali_erc20Contract_decimals); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - - try { - assert.isNotEmpty( - transactionData, - message.vali_erc20Contract_transferFrom - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_arbitrumUSDC, - data: transactionData, - }); - - try { - assert.isNotEmpty(userOpsBatch.to, message.vali_addTransaction_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await arbitrumMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - 
message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC721 NFT token with valid details on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get erc721 Contract Interface - let erc721Interface; - let erc721Data; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - - try { - assert.isNotEmpty( - erc721Data, - message.vali_erc721Transfer_contractInterface - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await 
arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.nft_tokenAddress, - data: erc721Data, - }); - - try { - assert.isNotEmpty( - userOpsBatch.to[0], - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data[0], - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - let op; - try { - op = await arbitrumMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - // passing callGasLimit as 40000 to manually set it - let op; - try { - op = await arbitrumMainNetSdk.estimate({ callGasLimit: 40000 }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await arbitrumMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the concurrent userops with valid details on the arbitrum network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString 
= e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = 1; - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await arbitrumMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await arbitrumMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await arbitrumMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await 
arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_9); - console.log(message.vali_estimateTransaction_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.invalidValue), // invalid value - }); - - addContext(test, message.fail_estimateTransaction_11); - assert.fail(message.fail_estimateTransaction_11); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_estimateTransaction_10); - 
console.log(message.vali_estimateTransaction_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.smallValue), // very small value - }); - - addContext(test, message.fail_estimateTransaction_12); - assert.fail(message.fail_estimateTransaction_12); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_estimateTransaction_11); - console.log(message.vali_estimateTransaction_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - 
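// NOTE: this regression test (and the others around it) follows the same negative
// pattern: batch a deliberately bad input, expect estimate() to throw, and treat an
// error whose reason matches the expected constant as a pass. A condensed sketch of
// that pattern, mirroring the code used in these tests:
//
//   try {
//     await arbitrumMainNetSdk.estimate();
//     assert.fail(message.fail_estimateTransaction_9); // estimation should not succeed
//   } catch (e) {
//     if (String(e.reason).includes(constant.invalid_address_6)) {
//       console.log(message.vali_estimateTransaction_8); // expected validation error
//     } else {
//       assert.fail(message.fail_estimateTransaction_9);
//     }
//   }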
try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await arbitrumMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await arbitrumMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await 
arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await arbitrumMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.empty_batch) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk details while Getting the Decimal from ERC20 Contract on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.invalidProviderNetwork_arbitrum // invalid provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider(); // without provider - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.otherProviderNetwork_arbitrum // other provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_15); - assert.fail(message.fail_estimateTransaction_15); - } catch (e) { - let error = e.message; - if (error.includes(constant.invalid_value_3)) { - addContext(test, message.vali_estimateTransaction_14); - console.log(message.vali_estimateTransaction_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.incorrectTokenAddress_arbitrumUSDC, // incorrect token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(message.fail_erc20Transfer_1); - 
assert.fail(message.fail_erc20Transfer_1); - } catch (e) { - if (e.reason === constant.invalid_address_6) { - addContext(test, message.vali_erc20Transfer_1); - console.log(message.vali_erc20Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. missing character details while Getting the Decimal from ERC20 Contract on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.invalidTokenAddress_arbitrumUSDC, // invalid token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_erc20Transfer_2); - assert.fail(message.fail_erc20Transfer_2); - } catch (e) { - if (e.reason === constant.invalid_address_4) { - addContext(test, message.vali_erc20Transfer_2); - console.log(message.vali_erc20Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - try { - new ethers.Contract(null, ERC20_ABI, provider); // null token address - - addContext(test, message.fail_erc20Transfer_3); - assert.fail(message.fail_erc20Transfer_3); - } catch (e) { - if (e.reason === constant.contract_address_2) { - addContext(test, message.vali_erc20Transfer_3); - console.log(message.vali_erc20Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the arbitrum network', async function () { - var test = this; - if (runTest) 
{ - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.invalidValue, decimals), // invalid value - ]); - - addContext(test, message.fail_erc20Transfer_5); - assert.fail(message.fail_erc20Transfer_5); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_erc20Transfer_5); - console.log(message.vali_erc20Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.smallValue, decimals), // very small value - ]); - - addContext(test, message.fail_erc20Transfer_6); - assert.fail(message.fail_erc20Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_erc20Transfer_6); - console.log(message.vali_erc20Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ]); - - addContext(test, message.fail_erc20Transfer_7); - assert.fail(message.fail_erc20Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_7); - console.log(message.vali_erc20Transfer_7); - } else { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.incorrectRecipient, // incorrect recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_8); - assert.fail(message.fail_erc20Transfer_8); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Transfer_8); - console.log(message.vali_erc20Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. 
missing character while Getting the transferFrom encoded data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.invalidRecipient, // invalid recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_9); - assert.fail(message.fail_erc20Transfer_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Transfer_9); - console.log(message.vali_erc20Transfer_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_10); - assert.fail(message.fail_erc20Transfer_10); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_10); - console.log(message.vali_erc20Transfer_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_erc20Transfer_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.incorrectTokenAddress_arbitrumUSDC, // Incorrect Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_16); - assert.fail(message.fail_estimateTransaction_16); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_15); - console.log(message.vali_estimateTransaction_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. 
missing character while adding transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.invalidTokenAddress_arbitrumUSDC, // Invalid Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_17); - assert.fail(message.fail_estimateTransaction_17); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_16); - console.log(message.vali_estimateTransaction_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - to: null, // Null Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_18); - assert.fail(message.fail_estimateTransaction_18); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_17); - console.log(message.vali_estimateTransaction_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear 
the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await arbitrumMainNetSdk.addUserOpsToBatch({ - data: transactionData, // without tokenAddress - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_19); - assert.fail(message.fail_estimateTransaction_19); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_18); - console.log(message.vali_estimateTransaction_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_arbitrum - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_arbitrumUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.incorrectSender, // incorrect sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_1); - assert.fail(message.fail_erc721Transfer_1); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_1); - console.log(message.vali_erc721Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.invalidSender, // invalid sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_2); - assert.fail(message.fail_erc721Transfer_2); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_2); - console.log(message.vali_erc721Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.recipient, // not added sender address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_3); - assert.fail(message.fail_erc721Transfer_3); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_3); - console.log(message.vali_erc721Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async 
function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.incorrectRecipient, // incorrect recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_4); - assert.fail(message.fail_erc721Transfer_4); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_4); - console.log(message.vali_erc721Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.invalidRecipient, // invalid recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_5); - assert.fail(message.fail_erc721Transfer_5); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_5); - console.log(message.vali_erc721Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, // not added recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_6); - assert.fail(message.fail_erc721Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_6); - console.log(message.vali_erc721Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.incorrectTokenId, // incorrect tokenid - ]); - - 
addContext(message.fail_erc721Transfer_7); - assert.fail(message.fail_erc721Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_erc721Transfer_7); - console.log(message.vali_erc721Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, // not added tokenid - ]); - - addContext(test, message.fail_erc721Transfer_8); - assert.fail(message.fail_erc721Transfer_8); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_8); - console.log(message.vali_erc721Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await arbitrumMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the arbitrum network', async 
function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = -5; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await arbitrumMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await arbitrumMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await arbitrumMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the arbitrum network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await 
arbitrumMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await arbitrumMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await arbitrumMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await arbitrumMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await arbitrumMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await arbitrumMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with non deployed address on the arbitrum network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - try { - if ((await provider.getCode(data.eoaAddress)).length <= 2) { - addContext(test, message.vali_deployAddress_1); - console.log(message.vali_deployAddress_1); - return; - } - - addContext(test, message.fail_deployAddress_1); - assert.fail(message.fail_deployAddress_1); - } catch (e) { - const errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_2)) { - 
addContext(test, message.vali_deployAddress_2); - console.log(message.vali_deployAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deployAddress_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transferringFunds/matic.spec.js b/test/specs/mainnet/transferringFunds/matic.spec.js deleted file mode 100644 index dc53907..0000000 --- a/test/specs/mainnet/transferringFunds/matic.spec.js +++ /dev/null @@ -1,3687 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import abi from '../../../data/nftabi.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let maticMainNetSdk; -let maticEtherspotWalletAddress; -let maticNativeAddress = null; -let maticDataService; -let runTest; - -describe('The PrimeSDK, when transfer a token with matic network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - maticMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.matic_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.matic_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - maticMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - maticEtherspotWalletAddress = - await maticMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - maticEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - maticDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await maticDataService.getAccountBalances({ - account: data.sender, - chainId: data.matic_chainid, - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === maticNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_maticUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token with valid details on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await maticMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC20 token with valid details on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - - try { - assert.isTrue( - provider._isProvider, - message.vali_erc20Transfer_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - 
// get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - - try { - assert.isNotEmpty(decimals, message.vali_erc20Contract_decimals); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - - try { - assert.isNotEmpty( - transactionData, - message.vali_erc20Contract_transferFrom - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_maticUSDC, - data: transactionData, - }); - - try { - assert.isNotEmpty(userOpsBatch.to, message.vali_addTransaction_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await maticMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC721 NFT token with valid details on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get erc721 Contract Interface - let erc721Interface; - let erc721Data; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - - try { - assert.isNotEmpty( - erc721Data, - message.vali_erc721Transfer_contractInterface - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.nft_tokenAddress, - data: erc721Data, - }); - - try { 
- assert.isNotEmpty( - userOpsBatch.to[0], - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data[0], - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - let op; - try { - op = await maticMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = 
e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - // passing callGasLimit as 40000 to manually set it - let op; - try { - op = await maticMainNetSdk.estimate({ callGasLimit: 40000 }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - 
message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await maticMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the concurrent userops with valid details on the matic network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await maticMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = 1; - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await maticMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await maticMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await maticMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_9); - console.log(message.vali_estimateTransaction_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.invalidValue), // invalid value - }); - - addContext(test, message.fail_estimateTransaction_11); - assert.fail(message.fail_estimateTransaction_11); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_estimateTransaction_10); - console.log(message.vali_estimateTransaction_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_estimateTransaction_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.smallValue), // very small value - }); - - addContext(test, message.fail_estimateTransaction_12); - assert.fail(message.fail_estimateTransaction_12); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_estimateTransaction_11); - console.log(message.vali_estimateTransaction_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) 
{ - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await maticMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await maticMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // 
get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await maticMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.empty_batch) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk details while Getting the Decimal from ERC20 Contract on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.invalidProviderNetwork_matic // invalid provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider(); // without provider - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - 
- addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.otherProviderNetwork_matic // other provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_15); - assert.fail(message.fail_estimateTransaction_15); - } catch (e) { - let error = e.message; - if (error.includes(constant.invalid_value_3)) { - addContext(test, message.vali_estimateTransaction_14); - console.log(message.vali_estimateTransaction_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.incorrectTokenAddress_maticUSDC, // incorrect token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(message.fail_erc20Transfer_1); - assert.fail(message.fail_erc20Transfer_1); - } catch (e) { - if (e.reason === constant.invalid_address_6) { - addContext(test, message.vali_erc20Transfer_1); - console.log(message.vali_erc20Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_erc20Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. missing character details while Getting the Decimal from ERC20 Contract on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.invalidTokenAddress_maticUSDC, // invalid token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_erc20Transfer_2); - assert.fail(message.fail_erc20Transfer_2); - } catch (e) { - if (e.reason === constant.invalid_address_4) { - addContext(test, message.vali_erc20Transfer_2); - console.log(message.vali_erc20Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - try { - new ethers.Contract(null, ERC20_ABI, provider); // null token address - - addContext(test, message.fail_erc20Transfer_3); - assert.fail(message.fail_erc20Transfer_3); - } catch (e) { - if (e.reason === constant.contract_address_2) { - addContext(test, message.vali_erc20Transfer_3); - console.log(message.vali_erc20Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.invalidValue, decimals), // invalid value - ]); - - addContext(test, message.fail_erc20Transfer_5); - assert.fail(message.fail_erc20Transfer_5); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_erc20Transfer_5); - console.log(message.vali_erc20Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the 
respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.smallValue, decimals), // very small value - ]); - - addContext(test, message.fail_erc20Transfer_6); - assert.fail(message.fail_erc20Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_erc20Transfer_6); - console.log(message.vali_erc20Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ]); - - addContext(test, message.fail_erc20Transfer_7); - assert.fail(message.fail_erc20Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_7); - console.log(message.vali_erc20Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded 
data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.incorrectRecipient, // incorrect recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_8); - assert.fail(message.fail_erc20Transfer_8); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Transfer_8); - console.log(message.vali_erc20Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. 
missing character while Getting the transferFrom encoded data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.invalidRecipient, // invalid recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_9); - assert.fail(message.fail_erc20Transfer_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Transfer_9); - console.log(message.vali_erc20Transfer_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_10); - assert.fail(message.fail_erc20Transfer_10); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_10); - console.log(message.vali_erc20Transfer_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_erc20Transfer_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.incorrectTokenAddress_maticUSDC, // Incorrect Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_16); - assert.fail(message.fail_estimateTransaction_16); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_15); - console.log(message.vali_estimateTransaction_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. 
missing character while adding transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: data.invalidTokenAddress_maticUSDC, // Invalid Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_17); - assert.fail(message.fail_estimateTransaction_17); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_16); - console.log(message.vali_estimateTransaction_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - to: null, // Null Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_18); - assert.fail(message.fail_estimateTransaction_18); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_17); - console.log(message.vali_estimateTransaction_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await 
maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await maticMainNetSdk.addUserOpsToBatch({ - data: transactionData, // without tokenAddress - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_19); - assert.fail(message.fail_estimateTransaction_19); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_18); - console.log(message.vali_estimateTransaction_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_matic - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_maticUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - 
it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.incorrectSender, // incorrect sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_1); - assert.fail(message.fail_erc721Transfer_1); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_1); - console.log(message.vali_erc721Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.invalidSender, // invalid sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_2); - assert.fail(message.fail_erc721Transfer_2); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_2); - console.log(message.vali_erc721Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.recipient, // not added sender address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_3); - assert.fail(message.fail_erc721Transfer_3); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_3); - console.log(message.vali_erc721Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new 
ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.incorrectRecipient, // incorrect recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_4); - assert.fail(message.fail_erc721Transfer_4); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_4); - console.log(message.vali_erc721Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.invalidRecipient, // invalid recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_5); - assert.fail(message.fail_erc721Transfer_5); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_5); - console.log(message.vali_erc721Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, // not added recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_6); - assert.fail(message.fail_erc721Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_6); - console.log(message.vali_erc721Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.incorrectTokenId, // incorrect tokenid - ]); - - addContext(message.fail_erc721Transfer_7); - assert.fail(message.fail_erc721Transfer_7); - } catch (e) { - if (e.reason === 
constant.invalid_bignumber_1) { - addContext(test, message.vali_erc721Transfer_7); - console.log(message.vali_erc721Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, // not added tokenid - ]); - - addContext(test, message.fail_erc721Transfer_8); - assert.fail(message.fail_erc721Transfer_8); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_8); - console.log(message.vali_erc721Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await maticMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the matic network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { 
- const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = -5; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await maticMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await maticMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await maticMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the matic network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await maticMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add 
transactions to the batch - let transactionBatch; - try { - transactionBatch = await maticMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await maticMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await maticMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await maticMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await maticMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with non deployed address on the matic network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - try { - if ((await provider.getCode(data.eoaAddress)).length <= 2) { - addContext(test, message.vali_deployAddress_1); - console.log(message.vali_deployAddress_1); - return; - } - - addContext(test, message.fail_deployAddress_1); - assert.fail(message.fail_deployAddress_1); - } catch (e) { - const errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_2)) { - addContext(test, message.vali_deployAddress_2); - console.log(message.vali_deployAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deployAddress_1); - 
} - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transferringFunds/newWallet_transferringFunds.spec.js b/test/specs/mainnet/transferringFunds/newWallet_transferringFunds.spec.js new file mode 100644 index 0000000..0e7e1dd --- /dev/null +++ b/test/specs/mainnet/transferringFunds/newWallet_transferringFunds.spec.js @@ -0,0 +1,3856 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers, utils, providers } from 'ethers'; +import { assert } from 'chai'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidProviderNetwork, + randomInvalidTokenAddress, + randomOtherProviderNetwork, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_mainnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import abi from '../../../data/nftabi.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let mainnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Perform the transaction of the tokens on the MainNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... 
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } 
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC20 token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + + try { + assert.isTrue( + provider._isProvider, + message.vali_erc20Transfer_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { 
+ console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + + try { + assert.isNotEmpty( + transactionData, + message.vali_erc20Contract_transferFrom + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
} + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC721 NFT token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get erc721 Contract Interface + let erc721Interface; + let erc721Data; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + + try { + assert.isNotEmpty( + erc721Data, + message.vali_erc721Transfer_contractInterface + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.nft_tokenAddress, + data: erc721Data, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to[0], + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + 
assert.isNotEmpty( + userOpsBatch.data[0], + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, 
eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + // passing callGasLimit as 40000 to manually set it + let op; + try { + op = await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the concurrent userops with valid details on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that Bundlers usually do not allow sending more than 10 concurrent userops from unstaked entities (wallets, factories, paymasters) + // Staked entities can send as many userops as they want + let concurrentUseropsCount = 1; + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await mainnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await mainnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + }
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_9); + console.log(message.vali_estimateTransaction_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseUnits(data.invalidValue), // invalid value + }); + + addContext(test, message.fail_estimateTransaction_11); + assert.fail(message.fail_estimateTransaction_11); + } catch (e) { 
+ if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_estimateTransaction_10); + console.log(message.vali_estimateTransaction_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseUnits(data.smallValue), // very small value + }); + + addContext(test, message.fail_estimateTransaction_12); + assert.fail(message.fail_estimateTransaction_12); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_estimateTransaction_11); + console.log(message.vali_estimateTransaction_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the 
' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. 
missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.empty_batch) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid provider network
details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomInvalidProviderNetwork // invalid provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without provider network details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider(); // without provider + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with other provider network details while Getting the Decimal from ERC20 Contract on the ' + +
randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomOtherProviderNetwork // other provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. 
missing character details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + try { + new ethers.Contract(null, ERC20_ABI, provider); // null token address + + addContext(test, message.fail_erc20Transfer_3); + assert.fail(message.fail_erc20Transfer_3); + } catch (e) { + if (e.reason === constant.contract_address_2) { + addContext(test, message.vali_erc20Transfer_3); + console.log(message.vali_erc20Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.invalidValue, + data.erc20_usdc_decimal + ), // invalid value + ]); + + addContext(test, message.fail_erc20Transfer_5); + assert.fail(message.fail_erc20Transfer_5); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_erc20Transfer_5); + console.log(message.vali_erc20Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + 
try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits(data.smallValue, data.erc20_usdc_decimal), // very small value + ]); + + addContext(test, message.fail_erc20Transfer_6); + assert.fail(message.fail_erc20Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_erc20Transfer_6); + console.log(message.vali_erc20Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ]); + + addContext(test, message.fail_erc20Transfer_7); + assert.fail(message.fail_erc20Transfer_7); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_7); + console.log(message.vali_erc20Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.incorrectRecipient, // incorrect recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_8); + assert.fail(message.fail_erc20Transfer_8); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Transfer_8); + console.log(message.vali_erc20Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. missing character while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.invalidRecipient, // invalid recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_9); + assert.fail(message.fail_erc20Transfer_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Transfer_9); + console.log(message.vali_erc20Transfer_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_10); + assert.fail(message.fail_erc20Transfer_10); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_10); + console.log(message.vali_erc20Transfer_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomIncorrectTokenAddress, // Incorrect Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_16); + assert.fail(message.fail_estimateTransaction_16); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_15); + console.log(message.vali_estimateTransaction_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_16); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } 
+ ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. missing character while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomInvalidTokenAddress, // Invalid Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_17); + assert.fail(message.fail_estimateTransaction_17); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_16); + console.log(message.vali_estimateTransaction_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); 
+ } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: null, // Null Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_18); + assert.fail(message.fail_estimateTransaction_18); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_17); + console.log(message.vali_estimateTransaction_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + data: transactionData, // without tokenAddress + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate 
transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_19); + assert.fail(message.fail_estimateTransaction_19); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_18); + console.log(message.vali_estimateTransaction_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.incorrectSender, // incorrect sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_1); + 
assert.fail(message.fail_erc721Transfer_1); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_1); + console.log(message.vali_erc721Transfer_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.invalidSender, // invalid sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_2); + assert.fail(message.fail_erc721Transfer_2); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_2); + console.log(message.vali_erc721Transfer_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.recipient, // not added sender address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_3); + assert.fail(message.fail_erc721Transfer_3); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_3); + console.log(message.vali_erc721Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.incorrectRecipient, // incorrect recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_4); + 
assert.fail(message.fail_erc721Transfer_4); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_4); + console.log(message.vali_erc721Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.invalidRecipient, // invalid recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_5); + assert.fail(message.fail_erc721Transfer_5); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_5); + console.log(message.vali_erc721Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, // not added recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_6); + assert.fail(message.fail_erc721Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_6); + console.log(message.vali_erc721Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.incorrectTokenId, // incorrect tokenId + ]); + + addContext(test, message.fail_erc721Transfer_7); + assert.fail(message.fail_erc721Transfer_7); + } catch
(e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_erc721Transfer_7); + console.log(message.vali_erc721Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, // not added tokenid + ]); + + addContext(test, message.fail_erc721Transfer_8); + assert.fail(message.fail_erc721Transfer_8); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_8); + console.log(message.vali_erc721Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); 
+ test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from unstaked entities (wallets, factories, paymasters) + // Staked entities can send as many userops as they want + let concurrentUseropsCount = -5; // invalid concurrent userops + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await mainnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await mainnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); +
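+  // A minimal illustrative sketch (not wired into any test here): the note in these
+  // concurrent-userop tests says bundlers usually reject more than 10 concurrent
+  // userops from an unstaked entity, so a caller-supplied count could be clamped
+  // before userops are built. The names below are assumptions for illustration only,
+  // not part of the Prime SDK API.
+  const MAX_UNSTAKED_USEROPS = 10; // assumed typical bundler limit for unstaked senders
+  const clampUseropsCount = (requested) =>
+    Math.min(Math.max(Number(requested) || 0, 0), MAX_UNSTAKED_USEROPS);
+  // e.g. clampUseropsCount(-5) === 0 and clampUseropsCount(25) === 10
+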
+ it( + 'REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from unstaked entities (wallets, factories, paymasters) + // Staked entities can send as many userops as they want + let concurrentUseropsCount; // concurrentUseropsCount intentionally left undefined for this test + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await mainnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await mainnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops with non deployed address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + try { + if ((await provider.getCode(data.eoaAddress)).length <= 2) { + addContext(test,
message.vali_deployAddress_1); + console.log(message.vali_deployAddress_1); + return; + } + + addContext(test, message.fail_deployAddress_1); + assert.fail(message.fail_deployAddress_1); + } catch (e) { + const errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_2)) { + addContext(test, message.vali_deployAddress_2); + console.log(message.vali_deployAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployAddress_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/transferringFunds/newWallet_xdai.spec.js b/test/specs/mainnet/transferringFunds/newWallet_xdai.spec.js deleted file mode 100644 index 6263c39..0000000 --- a/test/specs/mainnet/transferringFunds/newWallet_xdai.spec.js +++ /dev/null @@ -1,3583 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import testUtils from '../../../utils/testUtils.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import abi from '../../../data/nftabi.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; -import { dirname } from 'path'; -import { fileURLToPath } from 'url'; -import fs from 'fs'; -import path from 'path'; - -let xdaiMainNetSdk; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; -const __dirname = dirname(fileURLToPath(import.meta.url)); - -describe('The PrimeSDK, when transfer a token with xdai network on the MainNet (with new wallet)', function () { - before(async function () { - const filePath = path.join(__dirname, '../../../utils/testUtils.json'); - const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: sharedState.newPrivateKey }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // initializating Data service... 
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - }, data.retry); // Retry this async test up to 5 times - }); - - beforeEach(async function () { - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.xdai_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }); - - it('SMOKE: Perform the transfer native token with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC20 token with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - - try { - assert.isTrue( - provider._isProvider, - message.vali_erc20Transfer_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - - try { - assert.isNotEmpty( - transactionData, - message.vali_erc20Contract_transferFrom - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: transactionData, - }); - - try { - assert.isNotEmpty(userOpsBatch.to, message.vali_addTransaction_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC721 NFT token with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get erc721 Contract Interface - let erc721Interface; - let erc721Data; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - - try { - assert.isNotEmpty( - erc721Data, - message.vali_erc721Transfer_contractInterface - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.nft_tokenAddress, - data: erc721Data, - }); - - try { - assert.isNotEmpty( - userOpsBatch.to[0], - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data[0], - message.vali_addTransaction_data - 
); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - let op; - try { - op = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // 
Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - // passing callGasLimit as 40000 to manually set it - let op; - try { - op = await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - 
message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the concurrent userops with valid details on the xdai network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - 
balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = 1; - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await xdaiMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await xdaiMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the 
batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_9); - console.log(message.vali_estimateTransaction_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.invalidValue), // invalid value - }); - - addContext(test, message.fail_estimateTransaction_11); - assert.fail(message.fail_estimateTransaction_11); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_estimateTransaction_10); - console.log(message.vali_estimateTransaction_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_estimateTransaction_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.smallValue), // very small value - }); - - addContext(test, message.fail_estimateTransaction_12); - assert.fail(message.fail_estimateTransaction_12); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_estimateTransaction_11); - console.log(message.vali_estimateTransaction_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add 
transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - 
var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.empty_batch) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.invalidProviderNetwork_xdai // invalid provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider(); // without provider - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.otherProviderNetwork_xdai // other provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract 
Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.incorrectTokenAddress_xdaiUSDC, // incorrect token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. missing character details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.invalidTokenAddress_xdaiUSDC, // invalid token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - try { - new ethers.Contract(null, ERC20_ABI, 
provider); // null token address - - addContext(test, message.fail_erc20Transfer_3); - assert.fail(message.fail_erc20Transfer_3); - } catch (e) { - if (e.reason === constant.contract_address_2) { - addContext(test, message.vali_erc20Transfer_3); - console.log(message.vali_erc20Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.invalidValue, data.erc20_usdc_decimal), // invalid value - ]); - - addContext(test, message.fail_erc20Transfer_5); - assert.fail(message.fail_erc20Transfer_5); - } catch (e) { - 
if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_erc20Transfer_5); - console.log(message.vali_erc20Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.smallValue, data.erc20_usdc_decimal), // very small value - ]); - - addContext(test, message.fail_erc20Transfer_6); - assert.fail(message.fail_erc20Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_erc20Transfer_6); - console.log(message.vali_erc20Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ]); - - addContext(test, message.fail_erc20Transfer_7); - assert.fail(message.fail_erc20Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_7); - console.log(message.vali_erc20Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_erc20Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.incorrectRecipient, // incorrect recipient address - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_8); - assert.fail(message.fail_erc20Transfer_8); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Transfer_8); - console.log(message.vali_erc20Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. 
missing character while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.invalidRecipient, // invalid recipient address - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_9); - assert.fail(message.fail_erc20Transfer_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Transfer_9); - console.log(message.vali_erc20Transfer_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_10); - assert.fail(message.fail_erc20Transfer_10); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_10); - console.log(message.vali_erc20Transfer_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the xdai network', async function () { - var test = this; - 
if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.incorrectTokenAddress_xdaiUSDC, // Incorrect Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_16); - assert.fail(message.fail_estimateTransaction_16); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_15); - console.log(message.vali_estimateTransaction_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. 
missing character while adding transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.invalidTokenAddress_xdaiUSDC, // Invalid Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_17); - assert.fail(message.fail_estimateTransaction_17); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_16); - console.log(message.vali_estimateTransaction_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = 
erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: null, // Null Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_18); - assert.fail(message.fail_estimateTransaction_18); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_17); - console.log(message.vali_estimateTransaction_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - data: transactionData, // without tokenAddress - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, 
message.fail_estimateTransaction_19); - assert.fail(message.fail_estimateTransaction_19); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_18); - console.log(message.vali_estimateTransaction_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.incorrectSender, // incorrect sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_1); - assert.fail(message.fail_erc721Transfer_1); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_1); - 
console.log(message.vali_erc721Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.invalidSender, // invalid sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_2); - assert.fail(message.fail_erc721Transfer_2); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_2); - console.log(message.vali_erc721Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.recipient, // not added sender address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_3); - assert.fail(message.fail_erc721Transfer_3); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_3); - console.log(message.vali_erc721Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.incorrectRecipient, // incorrect recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_4); - assert.fail(message.fail_erc721Transfer_4); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_4); - console.log(message.vali_erc721Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_erc721Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.invalidRecipient, // invalid recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_5); - assert.fail(message.fail_erc721Transfer_5); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_5); - console.log(message.vali_erc721Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, // not added recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_6); - assert.fail(message.fail_erc721Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_6); - console.log(message.vali_erc721Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.incorrectTokenId, // incorrect tokenid - ]); - - addContext(message.fail_erc721Transfer_7); - assert.fail(message.fail_erc721Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_erc721Transfer_7); - console.log(message.vali_erc721Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, 
message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, // not added tokenid - ]); - - addContext(test, message.fail_erc721Transfer_8); - assert.fail(message.fail_erc721Transfer_8); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_8); - console.log(message.vali_erc721Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the xdai network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = -5; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await xdaiMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await xdaiMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the xdai network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await 
xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await xdaiMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await xdaiMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with non deployed address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - try { - if ((await provider.getCode(data.eoaAddress)).length <= 2) { - addContext(test, message.vali_deployAddress_1); - console.log(message.vali_deployAddress_1); - return; - } - - addContext(test, message.fail_deployAddress_1); - assert.fail(message.fail_deployAddress_1); - } catch (e) { - const errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_2)) { - addContext(test, message.vali_deployAddress_2); - console.log(message.vali_deployAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deployAddress_1); - } - } - }, data.retry); 
// Retry this async test up to 5 times
-      } else {
-        addContext(test, message.nativeTransaction_insufficientBalance);
-        console.warn(message.nativeTransaction_insufficientBalance);
-        test.skip();
-      }
-    });
-});
diff --git a/test/specs/mainnet/transferringFunds/oldWallet_transferringFunds.spec.js b/test/specs/mainnet/transferringFunds/oldWallet_transferringFunds.spec.js
new file mode 100644
index 0000000..27bf7a9
--- /dev/null
+++ b/test/specs/mainnet/transferringFunds/oldWallet_transferringFunds.spec.js
@@ -0,0 +1,3888 @@
+import * as dotenv from 'dotenv';
+dotenv.config(); // init dotenv
+import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk';
+import { ethers, utils, providers } from 'ethers';
+import { assert } from 'chai';
+import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js';
+import addContext from 'mochawesome/addContext.js';
+import customRetryAsync from '../../../utils/baseTest.js';
+import {
+  randomChainId,
+  randomChainName,
+  randomIncorrectTokenAddress,
+  randomInvalidProviderNetwork,
+  randomInvalidTokenAddress,
+  randomOtherProviderNetwork,
+  randomProviderNetwork,
+  randomTokenAddress,
+} from '../../../utils/sharedData_mainnet.js';
+import helper from '../../../utils/helper.js';
+import data from '../../../data/testData.json' assert { type: 'json' };
+import abi from '../../../data/nftabi.json' assert { type: 'json' };
+import constant from '../../../data/constant.json' assert { type: 'json' };
+import message from '../../../data/messages.json' assert { type: 'json' };
+
+let mainnetPrimeSdk;
+let etherspotWalletAddress;
+let nativeAddress = null;
+let dataService;
+let runTest;
+
+describe('Perform the transaction of the tokens on the MainNet (with old wallet)', function () {
+  before(async function () {
+    var test = this;
+
+    await customRetryAsync(async function () {
+      helper.wait(data.mediumTimeout);
+
+      // initializating sdk
+      try {
+        mainnetPrimeSdk = new PrimeSdk(
+          { privateKey: process.env.PRIVATE_KEY },
+          {
+            chainId: Number(randomChainId),
+            bundlerProvider: new EtherspotBundler(
+              Number(randomChainId),
+              process.env.BUNDLER_API_KEY
+            ),
+          }
+        );
+
+        try {
+          assert.strictEqual(
+            mainnetPrimeSdk.state.EOAAddress,
+            data.eoaAddress,
+            message.vali_eoa_address
+          );
+        } catch (e) {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+        }
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_sdk_initialize);
+      }
+
+      // get EtherspotWallet address
+      try {
+        etherspotWalletAddress =
+          await mainnetPrimeSdk.getCounterFactualAddress();
+
+        try {
+          assert.strictEqual(
+            etherspotWalletAddress,
+            data.sender,
+            message.vali_smart_address
+          );
+        } catch (e) {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+        }
+      } catch (e) {
+        console.error(e.message);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_smart_address);
+      }
+
+      // initializating Data service...
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC20 token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + + try { + assert.isTrue( + provider._isProvider, + 
message.vali_erc20Transfer_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + + try { + assert.isNotEmpty( + transactionData, + message.vali_erc20Contract_transferFrom + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + 
message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC721 NFT token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get erc721 Contract Interface + let erc721Interface; + let erc721Data; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + + try { + assert.isNotEmpty( + erc721Data, + message.vali_erc721Transfer_contractInterface + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.nft_tokenAddress, + data: erc721Data, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to[0], + 
message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.data[0], + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 
'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + // passing callGasLimit as 40000 to manually set it + let op; + try { + op = await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { 
+ assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await mainnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the concurrent userops with valid details on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) + // Staked entities can send as many userops as they want + let concurrentUseropsCount = 1; + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await mainnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await mainnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_9); + console.log(message.vali_estimateTransaction_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: 
ethers.utils.parseUnits(data.invalidValue), // invalid value + }); + + addContext(test, message.fail_estimateTransaction_11); + assert.fail(message.fail_estimateTransaction_11); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_estimateTransaction_10); + console.log(message.vali_estimateTransaction_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseUnits(data.smallValue), // very small value + }); + + addContext(test, message.fail_estimateTransaction_12); + assert.fail(message.fail_estimateTransaction_12); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_estimateTransaction_11); + console.log(message.vali_estimateTransaction_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + 
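+        // runTest is computed in beforeEach from the wallet's native and USDC
+        // balances; when either is below the configured minimum the spec is
+        // skipped here rather than failed.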
test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. 
missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await mainnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.empty_batch) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk 
details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomInvalidProviderNetwork // invalid provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider(); // without provider + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the ' + + 
randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomOtherProviderNetwork // other provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. 
missing character details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + try { + new ethers.Contract(null, ERC20_ABI, provider); // null token address + + addContext(test, message.fail_erc20Transfer_3); + assert.fail(message.fail_erc20Transfer_3); + } catch (e) { + if (e.reason === constant.contract_address_2) { + addContext(test, message.vali_erc20Transfer_3); + console.log(message.vali_erc20Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.invalidValue, + data.erc20_usdc_decimal + ), // invalid value + ]); + + addContext(test, message.fail_erc20Transfer_5); + assert.fail(message.fail_erc20Transfer_5); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_erc20Transfer_5); + console.log(message.vali_erc20Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + 
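+          // Negative-path pattern used by this regression test: the
+          // encodeFunctionData call below is expected to throw, and the catch
+          // branch compares e.reason against the relevant ethers error
+          // constant (here constant.invalid_value_1) instead of only checking
+          // that an error occurred.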
try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits(data.smallValue, data.erc20_usdc_decimal), // very small value + ]); + + addContext(test, message.fail_erc20Transfer_6); + assert.fail(message.fail_erc20Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_erc20Transfer_6); + console.log(message.vali_erc20Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ]); + + addContext(test, message.fail_erc20Transfer_7); + assert.fail(message.fail_erc20Transfer_7); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_7); + console.log(message.vali_erc20Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.incorrectRecipient, // incorrect recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_8); + assert.fail(message.fail_erc20Transfer_8); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Transfer_8); + console.log(message.vali_erc20Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. missing character while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.invalidRecipient, // invalid recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_9); + assert.fail(message.fail_erc20Transfer_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Transfer_9); + console.log(message.vali_erc20Transfer_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_10); + assert.fail(message.fail_erc20Transfer_10); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_10); + console.log(message.vali_erc20Transfer_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomIncorrectTokenAddress, // Incorrect Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_16); + assert.fail(message.fail_estimateTransaction_16); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_15); + console.log(message.vali_estimateTransaction_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_16); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } 
+ ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. missing character while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: randomInvalidTokenAddress, // Invalid Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_17); + assert.fail(message.fail_estimateTransaction_17); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_16); + console.log(message.vali_estimateTransaction_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); 
+ } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + to: null, // Null Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_18); + assert.fail(message.fail_estimateTransaction_18); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_17); + console.log(message.vali_estimateTransaction_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await mainnetPrimeSdk.addUserOpsToBatch({ + data: transactionData, // without tokenAddress + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate 
transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_19); + assert.fail(message.fail_estimateTransaction_19); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_18); + console.log(message.vali_estimateTransaction_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.incorrectSender, // incorrect sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_1); + 
assert.fail(message.fail_erc721Transfer_1); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_1); + console.log(message.vali_erc721Transfer_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.invalidSender, // invalid sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_2); + assert.fail(message.fail_erc721Transfer_2); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_2); + console.log(message.vali_erc721Transfer_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.recipient, // not added sender address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_3); + assert.fail(message.fail_erc721Transfer_3); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_3); + console.log(message.vali_erc721Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.incorrectRecipient, // incorrect recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_4); + 
assert.fail(message.fail_erc721Transfer_4); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_4); + console.log(message.vali_erc721Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.invalidRecipient, // invalid recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_5); + assert.fail(message.fail_erc721Transfer_5); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_5); + console.log(message.vali_erc721Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, // not added recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_6); + assert.fail(message.fail_erc721Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_6); + console.log(message.vali_erc721Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.incorrectTokenId, // incorrect tokenid + ]); + + addContext(message.fail_erc721Transfer_7); + assert.fail(message.fail_erc721Transfer_7); + } catch 
(e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_erc721Transfer_7); + console.log(message.vali_erc721Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, // not added tokenid + ]); + + addContext(test, message.fail_erc721Transfer_8); + assert.fail(message.fail_erc721Transfer_8); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_8); + console.log(message.vali_erc721Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await mainnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); 
+ test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) + // Staked entities can send as many userops as they want + let concurrentUseropsCount = -5; // invalid concurrent userops + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await mainnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await mainnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the 
sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) + // Staked entities can send as many userops as they want + let concurrentUseropsCount; // invalid concurrent userops + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await mainnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await mainnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops with non deployed address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + try { + if ((await provider.getCode(data.eoaAddress)).length <= 2) { + addContext(test, 
message.vali_deployAddress_1); + console.log(message.vali_deployAddress_1); + return; + } + + addContext(test, message.fail_deployAddress_1); + assert.fail(message.fail_deployAddress_1); + } catch (e) { + const errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_2)) { + addContext(test, message.vali_deployAddress_2); + console.log(message.vali_deployAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployAddress_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/mainnet/transferringFunds/optimism.spec.js b/test/specs/mainnet/transferringFunds/optimism.spec.js deleted file mode 100644 index 576b822..0000000 --- a/test/specs/mainnet/transferringFunds/optimism.spec.js +++ /dev/null @@ -1,3687 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import abi from '../../../data/nftabi.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let optimismMainNetSdk; -let optimismEtherspotWalletAddress; -let optimismNativeAddress = null; -let optimismDataService; -let runTest; - -describe('The PrimeSDK, when transfer a token with optimism network on the MainNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - optimismMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.optimism_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.optimism_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - optimismMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - optimismEtherspotWalletAddress = - await optimismMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - optimismEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - optimismDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await optimismDataService.getAccountBalances({ - account: data.sender, - chainId: data.optimism_chainid, - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === optimismNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_optimismUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token with valid details on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await optimismMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC20 token with valid details on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - - try { - assert.isTrue( - provider._isProvider, - message.vali_erc20Transfer_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - - try { - assert.isNotEmpty(decimals, message.vali_erc20Contract_decimals); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - - try { - assert.isNotEmpty( - transactionData, - message.vali_erc20Contract_transferFrom - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_optimismUSDC, - data: transactionData, - }); - - try { - assert.isNotEmpty(userOpsBatch.to, message.vali_addTransaction_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await optimismMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - 
message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC721 NFT token with valid details on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get erc721 Contract Interface - let erc721Interface; - let erc721Data; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - - try { - assert.isNotEmpty( - erc721Data, - message.vali_erc721Transfer_contractInterface - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await 
optimismMainNetSdk.addUserOpsToBatch({ - to: data.nft_tokenAddress, - data: erc721Data, - }); - - try { - assert.isNotEmpty( - userOpsBatch.to[0], - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data[0], - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - let op; - try { - op = await optimismMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const 
eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - // passing callGasLimit as 40000 to manually set it - let op; - try { - op = await optimismMainNetSdk.estimate({ callGasLimit: 40000 }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await optimismMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the concurrent userops with valid details on the optimism network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString 
= e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await optimismMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = 1; - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await optimismMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await optimismMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await optimismMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await 
optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_9); - console.log(message.vali_estimateTransaction_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.invalidValue), // invalid value - }); - - addContext(test, message.fail_estimateTransaction_11); - assert.fail(message.fail_estimateTransaction_11); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_estimateTransaction_10); - 
console.log(message.vali_estimateTransaction_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.smallValue), // very small value - }); - - addContext(test, message.fail_estimateTransaction_12); - assert.fail(message.fail_estimateTransaction_12); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_estimateTransaction_11); - console.log(message.vali_estimateTransaction_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - 
try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await optimismMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await optimismMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await 
optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await optimismMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.empty_batch) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk details while Getting the Decimal from ERC20 Contract on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.invalidProviderNetwork_optimism // invalid provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider(); // without provider - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.otherProviderNetwork_optimism // other provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_15); - assert.fail(message.fail_estimateTransaction_15); - } catch (e) { - let error = e.message; - if (error.includes(constant.invalid_value_3)) { - addContext(test, message.vali_estimateTransaction_14); - console.log(message.vali_estimateTransaction_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.incorrectTokenAddress_optimismUSDC, // incorrect token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(message.fail_erc20Transfer_1); - 
assert.fail(message.fail_erc20Transfer_1); - } catch (e) { - if (e.reason === constant.invalid_address_6) { - addContext(test, message.vali_erc20Transfer_1); - console.log(message.vali_erc20Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. missing character details while Getting the Decimal from ERC20 Contract on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.invalidTokenAddress_optimismUSDC, // invalid token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_erc20Transfer_2); - assert.fail(message.fail_erc20Transfer_2); - } catch (e) { - if (e.reason === constant.invalid_address_4) { - addContext(test, message.vali_erc20Transfer_2); - console.log(message.vali_erc20Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - try { - new ethers.Contract(null, ERC20_ABI, provider); // null token address - - addContext(test, message.fail_erc20Transfer_3); - assert.fail(message.fail_erc20Transfer_3); - } catch (e) { - if (e.reason === constant.contract_address_2) { - addContext(test, message.vali_erc20Transfer_3); - console.log(message.vali_erc20Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the optimism network', async function () { - var test = this; - if (runTest) 
{ - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.invalidValue, decimals), // invalid value - ]); - - addContext(test, message.fail_erc20Transfer_5); - assert.fail(message.fail_erc20Transfer_5); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_erc20Transfer_5); - console.log(message.vali_erc20Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.smallValue, decimals), // very small value - ]); - - addContext(test, message.fail_erc20Transfer_6); - assert.fail(message.fail_erc20Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_erc20Transfer_6); - console.log(message.vali_erc20Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ]); - - addContext(test, message.fail_erc20Transfer_7); - assert.fail(message.fail_erc20Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_7); - console.log(message.vali_erc20Transfer_7); - } else { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.incorrectRecipient, // incorrect recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_8); - assert.fail(message.fail_erc20Transfer_8); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Transfer_8); - console.log(message.vali_erc20Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. 
missing character while Getting the transferFrom encoded data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.invalidRecipient, // invalid recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_9); - assert.fail(message.fail_erc20Transfer_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Transfer_9); - console.log(message.vali_erc20Transfer_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_10); - assert.fail(message.fail_erc20Transfer_10); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_10); - console.log(message.vali_erc20Transfer_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_erc20Transfer_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.incorrectTokenAddress_optimismUSDC, // Incorrect Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_16); - assert.fail(message.fail_estimateTransaction_16); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_15); - console.log(message.vali_estimateTransaction_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. 
missing character while adding transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: data.invalidTokenAddress_optimismUSDC, // Invalid Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_17); - assert.fail(message.fail_estimateTransaction_17); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_16); - console.log(message.vali_estimateTransaction_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - to: null, // Null Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_18); - assert.fail(message.fail_estimateTransaction_18); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_17); - console.log(message.vali_estimateTransaction_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear 
the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await optimismMainNetSdk.addUserOpsToBatch({ - data: transactionData, // without tokenAddress - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_19); - assert.fail(message.fail_estimateTransaction_19); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_18); - console.log(message.vali_estimateTransaction_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_optimism - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_optimismUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - 
console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.incorrectSender, // incorrect sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_1); - assert.fail(message.fail_erc721Transfer_1); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_1); - console.log(message.vali_erc721Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.invalidSender, // invalid sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_2); - assert.fail(message.fail_erc721Transfer_2); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_2); - console.log(message.vali_erc721Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.recipient, // not added sender address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_3); - assert.fail(message.fail_erc721Transfer_3); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_3); - console.log(message.vali_erc721Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async 
function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.incorrectRecipient, // incorrect recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_4); - assert.fail(message.fail_erc721Transfer_4); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_4); - console.log(message.vali_erc721Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.invalidRecipient, // invalid recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_5); - assert.fail(message.fail_erc721Transfer_5); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_5); - console.log(message.vali_erc721Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, // not added recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_6); - assert.fail(message.fail_erc721Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_6); - console.log(message.vali_erc721Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.incorrectTokenId, // incorrect tokenid - ]); - - 
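// Illustrative sketch (not from the original suite): these negative ERC-721 cases rely on
// ethers v5 validating arguments inside Interface.encodeFunctionData, so a bad sender,
// recipient, or tokenId throws synchronously and the test only needs to match e.reason.
// The inline ABI fragment and literal values below are assumptions; the real tests load
// the ABI from nftabi.json and the values from testData.json.
import { ethers } from 'ethers';

const nftInterface = new ethers.utils.Interface([
  'function transferFrom(address from, address to, uint256 tokenId)',
]);

try {
  nftInterface.encodeFunctionData('transferFrom', [
    '0xinvalid',                                  // hypothetical malformed sender address
    '0x000000000000000000000000000000000000dEaD', // placeholder recipient
    1,
  ]);
} catch (e) {
  // ethers v5 attaches a human-readable reason (e.g. an "invalid address" style message),
  // which the suite compares against the entries in constant.json
  console.log('encode rejected as expected:', e.reason);
}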
addContext(message.fail_erc721Transfer_7); - assert.fail(message.fail_erc721Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_erc721Transfer_7); - console.log(message.vali_erc721Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, // not added tokenid - ]); - - addContext(test, message.fail_erc721Transfer_8); - assert.fail(message.fail_erc721Transfer_8); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_8); - console.log(message.vali_erc721Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await optimismMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the optimism network', async 
function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = -5; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await optimismMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await optimismMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await optimismMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the optimism network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await 
optimismMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await optimismMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await optimismMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await optimismMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await optimismMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await optimismMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with non deployed address on the optimism network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - try { - if ((await provider.getCode(data.eoaAddress)).length <= 2) { - addContext(test, message.vali_deployAddress_1); - console.log(message.vali_deployAddress_1); - return; - } - - addContext(test, message.fail_deployAddress_1); - assert.fail(message.fail_deployAddress_1); - } catch (e) { - const errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_2)) { - 
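// Minimal sketch (not part of the original diff) of the concurrent-userop pattern the two
// regression cases above drive with an invalid / undefined count: estimate one UserOp per
// key, send each, then poll getUserOpReceipt until the receipts land or a timeout expires.
// It assumes a funded, already-deployed sender wallet; the chain id (10 = Optimism),
// recipient and value below are placeholders for the testData.json entries.
import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk';
import { ethers } from 'ethers';

const sdk = new PrimeSdk(
  { privateKey: process.env.PRIVATE_KEY },
  { chainId: 10, bundlerProvider: new EtherspotBundler(10, process.env.BUNDLER_API_KEY) }
);

await sdk.clearUserOpsFromBatch();
await sdk.addUserOpsToBatch({
  to: '0x000000000000000000000000000000000000dEaD', // placeholder recipient
  value: ethers.utils.parseEther('0.000001'),       // placeholder value
});

// Bundlers usually reject more than 10 concurrent userops from unstaked entities,
// so a sane count stays well below that; the regression cases pass -5 / undefined instead.
let concurrentUseropsCount = 2;
const userops = [];
while (--concurrentUseropsCount >= 0) {
  userops.push(await sdk.estimate({ key: concurrentUseropsCount })); // one op per key
}

const uoHashes = [];
for (const op of userops) {
  uoHashes.push(await sdk.send(op));
}

// Poll for receipts with a one-minute cap, mirroring the waiting loop in the suite.
const receipts = new Array(uoHashes.length).fill(null);
const timeout = Date.now() + 60000;
while (receipts.some((r) => r == null) && Date.now() < timeout) {
  await new Promise((resolve) => setTimeout(resolve, 2000));
  for (let i = 0; i < uoHashes.length; i++) {
    if (!receipts[i]) receipts[i] = await sdk.getUserOpReceipt(uoHashes[i]);
  }
}
console.log('confirmed userops:', receipts.filter(Boolean).length);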
addContext(test, message.vali_deployAddress_2); - console.log(message.vali_deployAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deployAddress_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/mainnet/transferringFunds/xdai.spec.js b/test/specs/mainnet/transferringFunds/xdai.spec.js deleted file mode 100644 index c147cfa..0000000 --- a/test/specs/mainnet/transferringFunds/xdai.spec.js +++ /dev/null @@ -1,3609 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import abi from '../../../data/nftabi.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let xdaiMainNetSdk; -let xdaiEtherspotWalletAddress; -let xdaiNativeAddress = null; -let xdaiDataService; -let runTest; - -describe('The PrimeSDK, when transfer a token with xdai network on the MainNet (with old wallet)', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - xdaiMainNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.xdai_chainid), - bundlerProvider: new EtherspotBundler( - Number(data.xdai_chainid), - process.env.BUNDLER_API_KEY - ), - } - ); - - try { - assert.strictEqual( - xdaiMainNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - xdaiEtherspotWalletAddress = - await xdaiMainNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - xdaiEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - xdaiDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await xdaiDataService.getAccountBalances({ - account: data.sender, - chainId: data.xdai_chainid, - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === xdaiNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_xdaiUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
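// Sketch (not from the original spec) of the before() gating used throughout these suites:
// initialize the Prime SDK and data service, then only run the transfer tests when the
// smart wallet holds more than the configured native and USDC minimums. The chain id
// (100 = Gnosis/xdai), token address and minimum thresholds below are placeholders for
// the corresponding testData.json entries.
import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk';
import { utils } from 'ethers';

const chainId = 100;
const sdk = new PrimeSdk(
  { privateKey: process.env.PRIVATE_KEY },
  { chainId, bundlerProvider: new EtherspotBundler(chainId, process.env.BUNDLER_API_KEY) }
);
const dataService = new DataUtils(process.env.DATA_API_KEY);

const sender = await sdk.getCounterFactualAddress();
const balances = await dataService.getAccountBalances({ account: sender, chainId });

let nativeBalance = 0;
let usdcBalance = 0;
for (const item of balances.items) {
  if (item.token === null) {
    nativeBalance = Number(utils.formatUnits(item.balance, 18)); // native entry has a null token
  } else if (item.token === '0xUSDC...') {                       // placeholder USDC token address
    usdcBalance = Number(utils.formatUnits(item.balance, 6));
  }
}

// Gate the expensive transfer tests the same way runTest does in the suite.
const runTest = nativeBalance > 0.001 && usdcBalance > 0.1; // example minimums (assumed)
console.log({ runTest, nativeBalance, usdcBalance });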
addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC20 token with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - - try { - assert.isTrue( - provider._isProvider, - message.vali_erc20Transfer_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - - try { - assert.isNotEmpty( - transactionData, - message.vali_erc20Contract_transferFrom - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_xdaiUSDC, - data: transactionData, - }); - - try { - assert.isNotEmpty(userOpsBatch.to, message.vali_addTransaction_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - 
message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC721 NFT token with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get erc721 Contract Interface - let erc721Interface; - let erc721Data; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - - try { - assert.isNotEmpty( - erc721Data, - message.vali_erc721Transfer_contractInterface - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.nft_tokenAddress, - data: erc721Data, - }); - - try { - assert.isNotEmpty( - userOpsBatch.to[0], - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data[0], - message.vali_addTransaction_data - ); - } catch (e) { 
- console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - let op; - try { - op = await xdaiMainNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test 
up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - // passing callGasLimit as 40000 to manually set it - let op; - try { - op = await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await xdaiMainNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the concurrent userops with valid details on the xdai network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); 
- - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = 1; - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await xdaiMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await xdaiMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - 
addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_9); - console.log(message.vali_estimateTransaction_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.invalidValue), // invalid value - }); - - addContext(test, message.fail_estimateTransaction_11); - assert.fail(message.fail_estimateTransaction_11); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_estimateTransaction_10); - console.log(message.vali_estimateTransaction_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_11); - } 
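// Sketch (not from the original spec) of why the "invalid Value" and "very small Value"
// cases never reach the bundler: ethers v5 parseUnits/parseEther reject the string before
// the transaction is even added to the batch, and the suite only matches e.reason against
// constant.json. The literal inputs below are hypothetical examples of such values.
import { ethers } from 'ethers';

for (const value of ['abc', '0.0000000000000000001']) {
  try {
    ethers.utils.parseEther(value); // 18 decimals, equivalent to parseUnits(value)
    console.log(`${value} parsed unexpectedly`);
  } catch (e) {
    // e.g. an "invalid decimal value" / "fractional component exceeds decimals" style reason
    console.log(`${value} rejected:`, e.reason);
  }
}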
- } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.smallValue), // very small value - }); - - addContext(test, message.fail_estimateTransaction_12); - assert.fail(message.fail_estimateTransaction_12); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_estimateTransaction_11); - console.log(message.vali_estimateTransaction_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await 
xdaiMainNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await 
customRetryAsync(async function () { - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await xdaiMainNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.empty_batch) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.invalidProviderNetwork_xdai // invalid provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider(); // without provider - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); 
- assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.otherProviderNetwork_xdai // other provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - 
erc20Instance = new ethers.Contract( - data.incorrectTokenAddress_xdaiUSDC, // incorrect token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. missing character details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.invalidTokenAddress_xdaiUSDC, // invalid token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - try { - new ethers.Contract(null, ERC20_ABI, provider); // null token address - - 
addContext(test, message.fail_erc20Transfer_3); - assert.fail(message.fail_erc20Transfer_3); - } catch (e) { - if (e.reason === constant.contract_address_2) { - addContext(test, message.vali_erc20Transfer_3); - console.log(message.vali_erc20Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.invalidValue, data.erc20_usdc_decimal), // invalid value - ]); - - addContext(test, message.fail_erc20Transfer_5); - assert.fail(message.fail_erc20Transfer_5); - } catch (e) { - if (e.reason === 
constant.invalid_value_1) { - addContext(test, message.vali_erc20Transfer_5); - console.log(message.vali_erc20Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.smallValue, data.erc20_usdc_decimal), // very small value - ]); - - addContext(test, message.fail_erc20Transfer_6); - assert.fail(message.fail_erc20Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_erc20Transfer_6); - console.log(message.vali_erc20Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ]); - - addContext(test, message.fail_erc20Transfer_7); - assert.fail(message.fail_erc20Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_7); - console.log(message.vali_erc20Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_erc20Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.incorrectRecipient, // incorrect recipient address - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_8); - assert.fail(message.fail_erc20Transfer_8); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Transfer_8); - console.log(message.vali_erc20Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. 
missing character while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.invalidRecipient, // invalid recipient address - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_9); - assert.fail(message.fail_erc20Transfer_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Transfer_9); - console.log(message.vali_erc20Transfer_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - - addContext(test, message.fail_erc20Transfer_10); - assert.fail(message.fail_erc20Transfer_10); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_10); - console.log(message.vali_erc20Transfer_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the xdai network', async function () { - var test = this; - 
if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.incorrectTokenAddress_xdaiUSDC, // Incorrect Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_16); - assert.fail(message.fail_estimateTransaction_16); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_15); - console.log(message.vali_estimateTransaction_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. 
missing character while adding transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.invalidTokenAddress_xdaiUSDC, // Invalid Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_17); - assert.fail(message.fail_estimateTransaction_17); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_16); - console.log(message.vali_estimateTransaction_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = 
erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - to: null, // Null Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_18); - assert.fail(message.fail_estimateTransaction_18); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_17); - console.log(message.vali_estimateTransaction_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits( - data.erc20_value, - data.erc20_usdc_decimal - ), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await xdaiMainNetSdk.addUserOpsToBatch({ - data: transactionData, // without tokenAddress - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, 
message.fail_estimateTransaction_19); - assert.fail(message.fail_estimateTransaction_19); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_18); - console.log(message.vali_estimateTransaction_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_xdai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_xdaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal), - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.incorrectSender, // incorrect sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_1); - assert.fail(message.fail_erc721Transfer_1); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_1); - 
console.log(message.vali_erc721Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.invalidSender, // invalid sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_2); - assert.fail(message.fail_erc721Transfer_2); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_2); - console.log(message.vali_erc721Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.recipient, // not added sender address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_3); - assert.fail(message.fail_erc721Transfer_3); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_3); - console.log(message.vali_erc721Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.incorrectRecipient, // incorrect recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_4); - assert.fail(message.fail_erc721Transfer_4); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_4); - console.log(message.vali_erc721Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_erc721Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.invalidRecipient, // invalid recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_5); - assert.fail(message.fail_erc721Transfer_5); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_5); - console.log(message.vali_erc721Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, // not added recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_6); - assert.fail(message.fail_erc721Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_6); - console.log(message.vali_erc721Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.incorrectTokenId, // incorrect tokenid - ]); - - addContext(message.fail_erc721Transfer_7); - assert.fail(message.fail_erc721Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_erc721Transfer_7); - console.log(message.vali_erc721Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, 
message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, // not added tokenid - ]); - - addContext(test, message.fail_erc721Transfer_8); - assert.fail(message.fail_erc721Transfer_8); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_8); - console.log(message.vali_erc721Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await xdaiMainNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the xdai network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = -5; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await xdaiMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await xdaiMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the xdai network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await xdaiMainNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await 
xdaiMainNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await xdaiMainNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await xdaiMainNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await xdaiMainNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - if (eString === 'Error') { - console.warn(message.skip_transaction_error); - addContext(test, message.skip_transaction_error); - test.skip(); - } else { - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await xdaiMainNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with non deployed address on the xdai network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - try { - if ((await provider.getCode(data.eoaAddress)).length <= 2) { - addContext(test, message.vali_deployAddress_1); - console.log(message.vali_deployAddress_1); - return; - } - - addContext(test, message.fail_deployAddress_1); - assert.fail(message.fail_deployAddress_1); - } catch (e) { - const errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_2)) { - addContext(test, message.vali_deployAddress_2); - console.log(message.vali_deployAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deployAddress_1); - } - } - }, data.retry); 
// Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/testnet/connext/newWallet_connext.spec.js b/test/specs/testnet/connext/newWallet_connext.spec.js new file mode 100644 index 0000000..9aab86b --- /dev/null +++ b/test/specs/testnet/connext/newWallet_connext.spec.js @@ -0,0 +1,1360 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, ethers } from 'ethers'; +import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToChainId, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Validate the connext endpoints on the TestNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializing sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializing Data service...
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the all supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let allSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + allSupportedAssets = await dataService.getSupportedAssets({}); + + try { + assert.isNotEmpty( + allSupportedAssets[0].symbol, + message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + allSupportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].chainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + allSupportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let supportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + supportedAssets = await dataService.getSupportedAssets({ + chainId: randomChainId, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + supportedAssets[0].symbol, + 
message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + supportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.strictEqual( + supportedAssets[0].chainId, + randomChainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get quotes with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + quotes[0].data, + message.vali_connext_getQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].to, + message.vali_connext_getQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].value, + message.vali_connext_getQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transaction status with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomChainId, + toChainId: randomToChainId, + transactionHash: userOpsReceipt.receipt.transactionHash, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + transactionStatus.status, + message.vali_connext_getTransactionStatus_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.transactionHash, + message.vali_connext_getTransactionStatus_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.connextscanUrl, + message.vali_connext_getTransactionStatus_connextscanUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.invalidSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_7); + assert.fail(message.fail_connext_7); + } catch (e) { + if ( + 
e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_2); + console.log(message.vali_connext_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_8); + assert.fail(message.fail_connext_8); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_3); + console.log(message.vali_connext_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.incorrectSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_9); + assert.fail(message.fail_connext_9); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_4); + console.log(message.vali_connext_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.invalidRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_10); + assert.fail(message.fail_connext_10); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + 
addContext(test, message.vali_connext_5); + console.log(message.vali_connext_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_11); + assert.fail(message.fail_connext_11); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_6); + console.log(message.vali_connext_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.incorrectRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_12); + assert.fail(message.fail_connext_12); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_7); + console.log(message.vali_connext_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without fromChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_13); + assert.fail(message.fail_connext_13); + } catch (e) { + if (e.message === constant.invalid_address_9) { + addContext(test, message.vali_connext_8); + console.log(message.vali_connext_8); + } else { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_connext_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without toChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_14); + assert.fail(message.fail_connext_14); + } catch (e) { + if (e.message === constant.invalid_address_10) { + addContext(test, message.vali_connext_9); + console.log(message.vali_connext_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_14); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomInvalidTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_15); + assert.fail(message.fail_connext_15); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_10); + console.log(message.vali_connext_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_15); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomIncorrectTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_16); + assert.fail(message.fail_connext_16); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_11); + console.log(message.vali_connext_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_16); + } + } + }, data.retry); // 
Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_17); + assert.fail(message.fail_connext_17); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_12); + console.log(message.vali_connext_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.invalidValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_18); + assert.fail(message.fail_connext_18); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_connext_13); + console.log(message.vali_connext_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with small value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.smallValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_19); + assert.fail(message.fail_connext_19); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_connext_14); + console.log(message.vali_connext_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } 
+ ); + + it( + 'REGRESSION: Validate the get quotes without value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_20); + assert.fail(message.fail_connext_20); + } catch (e) { + if ( + e.errors[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + addContext(test, message.vali_connext_15); + console.log(message.vali_connext_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_20); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without slippage on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_21); + assert.fail(message.fail_connext_21); + } catch (e) { + if (e.message === constant.invalid_address_13) { + addContext(test, message.vali_connext_16); + console.log(message.vali_connext_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_21); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without fromChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + toChainId: randomToChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_22); + assert.fail(message.fail_connext_22); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_4 + ) { + addContext(test, message.vali_connext_17); + console.log(message.vali_connext_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_22); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without toChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the 
transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_23); + assert.fail(message.fail_connext_23); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_5 + ) { + addContext(test, message.vali_connext_18); + console.log(message.vali_connext_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_23); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with invalid transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.invalid_transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_24); + assert.fail(message.fail_connext_24); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_19); + console.log(message.vali_connext_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_24); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with incorrect transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.incorrect_transactionHash, + provider: BridgingProvider.Connext, + }); + + if (transactionStatus.status === constant.invalid_chainid_6) { + addContext(test, message.vali_connext_20); + console.log(message.vali_connext_20); + } else { + addContext(test, eString); + assert.fail(message.fail_connext_25); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_25); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + provider: 
BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_26); + assert.fail(message.fail_connext_26); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_21); + console.log(message.vali_connext_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_26); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/connext/oldWallet_connext.spec.js b/test/specs/testnet/connext/oldWallet_connext.spec.js new file mode 100644 index 0000000..6437ab5 --- /dev/null +++ b/test/specs/testnet/connext/oldWallet_connext.spec.js @@ -0,0 +1,1429 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, constants, BigNumber, ethers } from 'ethers'; +import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToChainId, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let testnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Validate the connext endpoints on the TestNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + testnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await testnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data service... 
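// A minimal sketch of the balance precondition that the before/beforeEach hooks
// in these specs repeat: fetch the smart-wallet balances, read the native asset
// (token === null, 18 decimals) and the configured ERC-20 (6 decimals), and only
// run the tests when both exceed the configured minimums. The helper name, its
// parameters, and the Number() conversion used for the comparison are
// illustrative assumptions; the API calls mirror the hook code that follows,
// and `utils` is the ethers utils import these specs already use.
async function hasSufficientBalances(dataService, account, chainId, tokenAddress, minNative, minToken) {
  const { items } = await dataService.getAccountBalances({
    account,
    chainId: Number(chainId),
  });

  let nativeBalance = 0;
  let tokenBalance = 0;
  for (const item of items) {
    if (item.token === null) {
      // native asset balance, formatted from wei
      nativeBalance = Number(utils.formatUnits(item.balance, 18));
    } else if (item.token === tokenAddress) {
      // USDC-style token balance, 6 decimals
      tokenBalance = Number(utils.formatUnits(item.balance, 6));
    }
  }

  return nativeBalance > Number(minNative) && tokenBalance > Number(minToken);
}
// Possible usage inside a hook:
//   runTest = await hasSufficientBalances(dataService, data.sender, randomChainId,
//     randomTokenAddress, data.minimum_native_balance, data.minimum_token_balance);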
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the all supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let allSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + allSupportedAssets = await dataService.getSupportedAssets({}); + + try { + assert.isNotEmpty( + allSupportedAssets[0].symbol, + message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + allSupportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + allSupportedAssets[0].chainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + 
assert.isNotEmpty( + allSupportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the supported assets with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let supportedAssets; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + supportedAssets = await dataService.getSupportedAssets({ + chainId: randomChainId, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + supportedAssets[0].symbol, + message.vali_connext_getSupportedAssets_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].address, + message.vali_connext_getSupportedAssets_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + supportedAssets[0].decimals, + message.vali_connext_getSupportedAssets_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.strictEqual( + supportedAssets[0].chainId, + randomChainId, + message.vali_connext_getSupportedAssets_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + supportedAssets[0].icon, + message.vali_connext_getSupportedAssets_icon + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get quotes with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + quotes[0].data, + message.vali_connext_getQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].to, + message.vali_connext_getQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes[0].value, + message.vali_connext_getQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } 
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transaction status with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomChainId, + toChainId: randomToChainId, + transactionHash: userOpsReceipt.receipt.transactionHash, + provider: BridgingProvider.Connext, + }); + + try { + assert.isNotEmpty( + transactionStatus.status, + message.vali_connext_getTransactionStatus_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.transactionHash, + message.vali_connext_getTransactionStatus_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionStatus.connextscanUrl, + message.vali_connext_getTransactionStatus_connextscanUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.invalidSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_7); + assert.fail(message.fail_connext_7); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_2); + console.log(message.vali_connext_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_8); + assert.fail(message.fail_connext_8); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_3); + console.log(message.vali_connext_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect sender address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.incorrectSender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_9); + assert.fail(message.fail_connext_9); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_7 + ) { + addContext(test, message.vali_connext_4); + console.log(message.vali_connext_4); + } else { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_connext_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.invalidRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_10); + assert.fail(message.fail_connext_10); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_5); + console.log(message.vali_connext_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_11); + assert.fail(message.fail_connext_11); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_6); + console.log(message.vali_connext_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect recepient address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.incorrectRecipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_12); + assert.fail(message.fail_connext_12); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_8 + ) { + addContext(test, message.vali_connext_7); + console.log(message.vali_connext_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_12); + } + } + 
}, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without fromChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_13); + assert.fail(message.fail_connext_13); + } catch (e) { + if (e.message === constant.invalid_address_9) { + addContext(test, message.vali_connext_8); + console.log(message.vali_connext_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without toChainid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_14); + assert.fail(message.fail_connext_14); + } catch (e) { + if (e.message === constant.invalid_address_10) { + addContext(test, message.vali_connext_9); + console.log(message.vali_connext_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_14); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomInvalidTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_15); + assert.fail(message.fail_connext_15); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_10); + console.log(message.vali_connext_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_15); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); 
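// A minimal sketch of the negative-test pattern shared by the REGRESSION cases
// around this point: call getQuotes with one invalid or missing parameter,
// expect a rejection, and compare a class-validator constraint message against
// the expected constant. The helper name and signature are illustrative
// assumptions; the error shape (e.errors[0].constraints) is the one these
// specs already inspect.
async function expectConstraintError(action, constraintKey, expectedMessage) {
  try {
    await action();
  } catch (e) {
    const constraints = e.errors && e.errors[0] && e.errors[0].constraints;
    if (constraints && constraints[constraintKey] === expectedMessage) {
      return; // rejected with the expected validation message
    }
    throw e; // unexpected error, surface it to the test
  }
  throw new Error('Expected the call to be rejected, but it resolved');
}
// Possible usage (mirrors the invalid-sender case):
//   await expectConstraintError(
//     () => dataService.getQuotes({ fromAddress: data.invalidSender /* ...remaining valid params... */ }),
//     'isAddress',
//     constant.invalid_address_7
//   );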
+ test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with incorrect from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomIncorrectTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_16); + assert.fail(message.fail_connext_16); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_11); + console.log(message.vali_connext_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_16); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without from token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromAmount: utils.parseUnits(data.swap_value, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_17); + assert.fail(message.fail_connext_17); + } catch (e) { + if ( + e.errors[0].constraints.isAddress === constant.invalid_address_11 + ) { + addContext(test, message.vali_connext_12); + console.log(message.vali_connext_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with invalid value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.invalidValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_18); + assert.fail(message.fail_connext_18); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_connext_13); + console.log(message.vali_connext_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes with small value on the ' + + 
randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.smallValue, 18), + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_19); + assert.fail(message.fail_connext_19); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_connext_14); + console.log(message.vali_connext_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without value on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + slippage: 0.1, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_20); + assert.fail(message.fail_connext_20); + } catch (e) { + if ( + e.errors[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + addContext(test, message.vali_connext_15); + console.log(message.vali_connext_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_20); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get quotes without slippage on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let quotes; + if (runTest) { + await customRetryAsync(async function () { + try { + quotes = await dataService.getQuotes({ + fromAddress: data.sender, + toAddress: data.recipient, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromToken: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 18), + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_21); + assert.fail(message.fail_connext_21); + } catch (e) { + if (e.message === constant.invalid_address_13) { + addContext(test, message.vali_connext_16); + console.log(message.vali_connext_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_21); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without fromChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + 
transactionStatus = await dataService.getTransactionStatus({ + toChainId: randomToChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_22); + assert.fail(message.fail_connext_22); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_4 + ) { + addContext(test, message.vali_connext_17); + console.log(message.vali_connext_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_22); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without toChainId on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + transactionHash: data.transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_23); + assert.fail(message.fail_connext_23); + } catch (e) { + if ( + e.errors[0].constraints.isPositive === constant.invalid_chainid_5 + ) { + addContext(test, message.vali_connext_18); + console.log(message.vali_connext_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_23); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with invalid transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.invalid_transactionHash, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_24); + assert.fail(message.fail_connext_24); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_19); + console.log(message.vali_connext_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_24); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status with incorrect transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + transactionHash: data.incorrect_transactionHash, + provider: 
BridgingProvider.Connext, + }); + + if (transactionStatus.status === constant.invalid_chainid_6) { + addContext(test, message.vali_connext_20); + console.log(message.vali_connext_20); + } else { + addContext(test, eString); + assert.fail(message.fail_connext_25); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_25); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction status without transactionHash on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the transaction status + let transactionStatus; + try { + transactionStatus = await dataService.getTransactionStatus({ + fromChainId: randomInvalidChainId, + toChainId: randomToChainId, + provider: BridgingProvider.Connext, + }); + + addContext(test, message.fail_connext_26); + assert.fail(message.fail_connext_26); + } catch (e) { + if ( + e.errors[0].constraints.isHex === constant.transactionHash_32hex + ) { + addContext(test, message.vali_connext_21); + console.log(message.vali_connext_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_connext_26); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.connext_insufficientBalance); + console.warn(message.connext_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/connext/sepolia.spec.js b/test/specs/testnet/connext/sepolia.spec.js deleted file mode 100644 index d0482ae..0000000 --- a/test/specs/testnet/connext/sepolia.spec.js +++ /dev/null @@ -1,1276 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils } from '@etherspot/prime-sdk'; -import { utils, constants, BigNumber, ethers } from 'ethers'; -import { BridgingProvider } from '@etherspot/prime-sdk/dist/sdk/data/index.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let sepoliaTestNetSdk; -let sepoliaEtherspotWalletAddress; -let sepoliaNativeAddress = null; -let sepoliaDataService; -let runTest; - -describe('The PrimeSDK, Validate the connext endpoints with sepolia network on the TestNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - sepoliaTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.sepolia_chainid), - } - ); - - try { - assert.strictEqual( - sepoliaTestNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get 
EtherspotWallet address - try { - sepoliaEtherspotWalletAddress = - await sepoliaTestNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - sepoliaEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - sepoliaDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await sepoliaDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.sepolia_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === sepoliaNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_sepoliaUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the all supported assets with valid details on the sepolia network', async function () { - var test = this; - let allSupportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - allSupportedAssets = await sepoliaDataService.getSupportedAssets({}); - - try { - assert.isNotEmpty( - allSupportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - allSupportedAssets[0].chainId, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - allSupportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - 
it('SMOKE: Validate the supported assets with valid details on the sepolia network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - supportedAssets = await sepoliaDataService.getSupportedAssets({ - chainId: data.sepolia_chainid, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - supportedAssets[0].symbol, - message.vali_connext_getSupportedAssets_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].address, - message.vali_connext_getSupportedAssets_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - supportedAssets[0].decimals, - message.vali_connext_getSupportedAssets_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.strictEqual( - supportedAssets[0].chainId, - data.sepolia_chainid, - message.vali_connext_getSupportedAssets_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - supportedAssets[0].icon, - message.vali_connext_getSupportedAssets_icon - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get quotes with valid details on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - quotes[0].data, - message.vali_connext_getQuotes_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(quotes[0].to, message.vali_connext_getQuotes_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - quotes[0].value, - message.vali_connext_getQuotes_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transaction status with valid details on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - 
helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await sepoliaTestNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await sepoliaDataService.getTransactionStatus({ - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - transactionHash: userOpsReceipt.receipt.transactionHash, - provider: BridgingProvider.Connext, - }); - - try { - assert.isNotEmpty( - transactionStatus.status, - message.vali_connext_getTransactionStatus_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.transactionHash, - message.vali_connext_getTransactionStatus_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionStatus.connextscanUrl, - message.vali_connext_getTransactionStatus_connextscanUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the supported assets with invalid chainid on the sepolia network', async function () { - var test = this; - let supportedAssets; - if (runTest) { - await customRetryAsync(async function () { - try { - supportedAssets = await sepoliaDataService.getSupportedAssets({ - chainId: data.invalid_sepolia_chainid, - 
provider: BridgingProvider.Connext, - }); - - if (supportedAssets.length === 0) { - addContext(test, message.vali_connext_1); - console.log(message.vali_connext_1); - } else { - addContext(test, message.fail_connext_5); - assert.fail(message.fail_connext_5); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid sender address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.invalidSender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_7); - assert.fail(message.fail_connext_7); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_2); - console.log(message.vali_connext_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without sender address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_8); - assert.fail(message.fail_connext_8); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_3); - console.log(message.vali_connext_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect sender address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.incorrectSender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_9); - assert.fail(message.fail_connext_9); - } 
catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_7 - ) { - addContext(test, message.vali_connext_4); - console.log(message.vali_connext_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid recepient address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.invalidRecipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_10); - assert.fail(message.fail_connext_10); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_5); - console.log(message.vali_connext_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without recepient address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_11); - assert.fail(message.fail_connext_11); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_6); - console.log(message.vali_connext_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect recepient address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.incorrectRecipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_12); - assert.fail(message.fail_connext_12); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === 
constant.invalid_address_8 - ) { - addContext(test, message.vali_connext_7); - console.log(message.vali_connext_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without fromChainid details on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_13); - assert.fail(message.fail_connext_13); - } catch (e) { - if (e.message === constant.invalid_address_9) { - addContext(test, message.vali_connext_8); - console.log(message.vali_connext_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without toChainid details on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_14); - assert.fail(message.fail_connext_14); - } catch (e) { - if (e.message === constant.invalid_address_10) { - addContext(test, message.vali_connext_9); - console.log(message.vali_connext_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid from token address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.invalidTokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_15); - assert.fail(message.fail_connext_15); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_10); - console.log(message.vali_connext_10); - } else { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_connext_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with incorrect from token address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.incorrectTokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_16); - assert.fail(message.fail_connext_16); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_11); - console.log(message.vali_connext_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without from token address on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromAmount: utils.parseUnits(data.swap_value, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_17); - assert.fail(message.fail_connext_17); - } catch (e) { - if ( - e.errors[0].constraints.isAddress === constant.invalid_address_11 - ) { - addContext(test, message.vali_connext_12); - console.log(message.vali_connext_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with invalid value on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.invalidValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_18); - assert.fail(message.fail_connext_18); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_connext_13); - console.log(message.vali_connext_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_18); - } - } - }, data.retry); // Retry this async test up to 5 
times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes with small value on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.smallValue, 18), - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_19); - assert.fail(message.fail_connext_19); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_connext_14); - console.log(message.vali_connext_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without value on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - slippage: 0.1, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_20); - assert.fail(message.fail_connext_20); - } catch (e) { - if ( - e.errors[0].constraints.IsBigNumberish === - constant.invalid_bignumber_2 - ) { - addContext(test, message.vali_connext_15); - console.log(message.vali_connext_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_20); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get quotes without slippage on the sepolia network', async function () { - var test = this; - let quotes; - if (runTest) { - await customRetryAsync(async function () { - try { - quotes = await sepoliaDataService.getQuotes({ - fromAddress: data.sender, - toAddress: data.recipient, - fromChainId: data.sepolia_chainid, - toChainId: data.matic_chainid, - fromToken: data.tokenAddress_sepoliaUSDC, - fromAmount: utils.parseUnits(data.swap_value, 18), - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_21); - assert.fail(message.fail_connext_21); - } catch (e) { - if (e.message === constant.invalid_address_13) { - addContext(test, message.vali_connext_16); - console.log(message.vali_connext_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_21); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without 
fromChainId on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await sepoliaDataService.getTransactionStatus({ - toChainId: data.matic_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_22); - assert.fail(message.fail_connext_22); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_4 - ) { - addContext(test, message.vali_connext_17); - console.log(message.vali_connext_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_22); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without toChainId on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await sepoliaDataService.getTransactionStatus({ - fromChainId: data.invalid_sepolia_chainid, - transactionHash: data.transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_23); - assert.fail(message.fail_connext_23); - } catch (e) { - if ( - e.errors[0].constraints.isPositive === constant.invalid_chainid_5 - ) { - addContext(test, message.vali_connext_18); - console.log(message.vali_connext_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_23); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with invalid transactionHash on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await sepoliaDataService.getTransactionStatus({ - fromChainId: data.invalid_sepolia_chainid, - toChainId: data.matic_chainid, - transactionHash: data.invalid_transactionHash, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_24); - assert.fail(message.fail_connext_24); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_19); - console.log(message.vali_connext_19); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_24); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status with incorrect transactionHash on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await 
sepoliaDataService.getTransactionStatus({ - fromChainId: data.invalid_sepolia_chainid, - toChainId: data.matic_chainid, - transactionHash: data.incorrect_transactionHash, - provider: BridgingProvider.Connext, - }); - - if (transactionStatus.status === constant.invalid_chainid_6) { - addContext(test, message.vali_connext_20); - console.log(message.vali_connext_20); - } else { - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_25); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction status without transactionHash on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the transaction status - let transactionStatus; - try { - transactionStatus = await sepoliaDataService.getTransactionStatus({ - fromChainId: data.invalid_sepolia_chainid, - toChainId: data.matic_chainid, - provider: BridgingProvider.Connext, - }); - - addContext(test, message.fail_connext_26); - assert.fail(message.fail_connext_26); - } catch (e) { - if ( - e.errors[0].constraints.isHex === constant.transactionHash_32hex - ) { - addContext(test, message.vali_connext_21); - console.log(message.vali_connext_21); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_connext_26); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.connext_insufficientBalance); - console.warn(message.connext_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/testnet/listAndRates/newWallet_listAndRates.spec.js b/test/specs/testnet/listAndRates/newWallet_listAndRates.spec.js new file mode 100644 index 0000000..79cbc79 --- /dev/null +++ b/test/specs/testnet/listAndRates/newWallet_listAndRates.spec.js @@ -0,0 +1,849 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToTokenAddress, + randomTokenAddress, + randomTokenAddressUsdt, +} from '../../../utils/sharedData_testnet.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Validate the NFT List, Token List and Exchange Rates details on the TestNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = 
JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the NFT List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let nfts; + try { + nfts = await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.sender, + }); + + if (nfts.items.length > 0) { + addContext(test, message.pass_nft_list_1); + console.log(message.pass_nftList_1); + + try { + assert.isNotEmpty( + nfts.items[0].contractAddress, + message.vali_nftList_contractAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].tokenType, + message.vali_nftList_tokenType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].balance, + message.vali_nftList_balance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].items[0].tokenId, + message.vali_nftList_items_tokenId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].items[0].amount, + message.vali_nftList_items_amount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_nftList_2); + console.log(message.pass_nftList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_1); + } + 
}, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Token List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let tokenLists; + let tokenListTokens; + try { + tokenLists = await dataService.getTokenLists({ + chainId: randomChainId, + }); + + if (tokenLists.length > 0) { + console.log(message.pass_tokenList_1); + addContext(test, message.pass_tokenList_1); + + try { + assert.isNotEmpty( + tokenLists[0].name, + message.vali_tokenList_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenLists[0].endpoint, + message.vali_tokenList_endpoint + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + }); + + if (tokenListTokens.length > 0) { + console.log(message.pass_tokenList_3); + addContext(test, message.pass_tokenList_3); + + try { + assert.isNotEmpty( + tokenListTokens[0].address, + message.vali_tokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_tokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_tokenListTokens_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_tokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_tokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + message.vali_tokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_4); + console.log(message.pass_tokenList_4); + } + + if (tokenLists.length > 0) { + const { name } = tokenLists[0]; + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + name, + }); + + if (tokenListTokens.length > 0) { + addContext(test, message.pass_tokenList_5); + console.log(message.pass_tokenList_5); + + try { + assert.isNotEmpty( + tokenListTokens[0].address, + message.vali_selectedTokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_selectedTokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_selectedTokenListTokens_symbol + ); + } 
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_selectedTokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_selectedTokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + message.vali_selectedTokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_6); + console.log(message.pass_tokenList_6); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_tokenList_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.tokenList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange Rates on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let TOKEN_LIST; + let rates; + let requestPayload; + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + rates = await dataService.fetchExchangeRates(requestPayload); + + for (let i = 0; i < rates.items.length; i++) { + try { + assert.isNotEmpty( + rates.items[i].address, + message.vali_exchangeRates_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eth, + message.vali_exchangeRates_eth + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eur, + message.vali_exchangeRates_eur + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].gbp, + message.vali_exchangeRates_gbp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].usd, + message.vali_exchangeRates_usd + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with invalid account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_nftList_2); + 
assert.fail(message.fail_nftList_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_3); + console.log(message.pass_nftList_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with incorrect account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_nftList_3); + assert.fail(message.fail_nftList_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_4); + console.log(message.pass_nftList_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with other token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [ + randomTokenAddress, + randomTokenAddressUsdt, + randomToTokenAddress, + ]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [randomInvalidTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_3); + assert.fail(message.fail_exchangeRates_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_1); + console.log(message.pass_exchangeRates_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, 
message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with incorrect token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomIncorrectTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_4); + assert.fail(message.fail_exchangeRates_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_2); + console.log(message.pass_exchangeRates_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let TOKEN_LIST = []; + + let requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_5); + assert.fail(message.fail_exchangeRates_5); + } catch (e) { + let error = e.message; + if (error.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_3); + console.log(message.pass_exchangeRates_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomInvalidChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_4); + console.log(message.pass_exchangeRates_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await 
customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_7); + assert.fail(message.fail_exchangeRates_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_5); + console.log(message.pass_exchangeRates_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/listAndRates/oldWallet_listAndRates.spec.js b/test/specs/testnet/listAndRates/oldWallet_listAndRates.spec.js new file mode 100644 index 0000000..2c4256c --- /dev/null +++ b/test/specs/testnet/listAndRates/oldWallet_listAndRates.spec.js @@ -0,0 +1,918 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomToTokenAddress, + randomTokenAddress, + randomTokenAddressUsdt, +} from '../../../utils/sharedData_testnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let testnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Validate the NFT List, Token List and Exchange Rates details on the TestNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + testnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await testnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating 
Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the NFT List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let nfts; + try { + nfts = await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.sender, + }); + + if (nfts.items.length > 0) { + addContext(test, message.pass_nftList_1); + console.log(message.pass_nftList_1); + + try { + assert.isNotEmpty( + nfts.items[0].contractAddress, + message.vali_nftList_contractAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].tokenType, + message.vali_nftList_tokenType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].balance, + message.vali_nftList_balance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + nfts.items[0].items[0].tokenId, + message.vali_nftList_items_tokenId + ); + } catch (e) { + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + nfts.items[0].items[0].amount, + message.vali_nftList_items_amount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_nftList_2); + console.log(message.pass_nftList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Token List on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let tokenLists; + let tokenListTokens; + try { + tokenLists = await dataService.getTokenLists({ + chainId: randomChainId, + }); + + if (tokenLists.length > 0) { + console.log(message.pass_tokenList_1); + addContext(test, message.pass_tokenList_1); + + try { + assert.isNotEmpty( + tokenLists[0].name, + message.vali_tokenList_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenLists[0].endpoint, + message.vali_tokenList_endpoint + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + }); + + if (tokenListTokens.length > 0) { + console.log(message.pass_tokenList_3); + addContext(test, message.pass_tokenList_3); + + try { + assert.isNotEmpty( + tokenListTokens[0].address, + message.vali_tokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_tokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_tokenListTokens_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_tokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_tokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + message.vali_tokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_4); + console.log(message.pass_tokenList_4); + } + + if (tokenLists.length > 0) { + const { name } = tokenLists[0]; + + tokenListTokens = await dataService.getTokenListTokens({ + chainId: randomChainId, + name, + }); + + if (tokenListTokens.length > 0) { + addContext(test, message.pass_tokenList_5); + console.log(message.pass_tokenList_5); + + try { + assert.isNotEmpty( + 
tokenListTokens[0].address, + message.vali_selectedTokenListTokens_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].name, + message.vali_selectedTokenListTokens_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].symbol, + message.vali_selectedTokenListTokens_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].decimals, + message.vali_selectedTokenListTokens_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + tokenListTokens[0].logoURI, + message.vali_selectedTokenListTokens_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + tokenListTokens[0].chainId, + message.vali_selectedTokenListTokens_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.pass_tokenList_6); + console.log(message.pass_tokenList_6); + } + } else { + addContext(test, message.pass_tokenList_2); + console.log(message.pass_tokenList_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_tokenList_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.tokenList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange Rates on the ' + randomChainName + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let TOKEN_LIST; + let rates; + let requestPayload; + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + rates = await dataService.fetchExchangeRates(requestPayload); + + for (let i = 0; i < rates.items.length; i++) { + try { + assert.isNotEmpty( + rates.items[i].address, + message.vali_exchangeRates_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eth, + message.vali_exchangeRates_eth + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].eur, + message.vali_exchangeRates_eur + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].gbp, + message.vali_exchangeRates_gbp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + rates.items[i].usd, + message.vali_exchangeRates_usd + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, 
message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with invalid account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_nftList_2); + assert.fail(message.fail_nftList_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_3); + console.log(message.pass_nftList_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the NFT List with incorrect account address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getNftList({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_nftList_3); + assert.fail(message.fail_nftList_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.pass_nftList_4); + console.log(message.pass_nftList_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_nftList_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.nftList_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with other token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [ + randomTokenAddress, + randomTokenAddressUsdt, + randomToTokenAddress, + ]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + try { + TOKEN_LIST = [randomInvalidTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_3); + 
assert.fail(message.fail_exchangeRates_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_1); + console.log(message.pass_exchangeRates_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with incorrect token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomIncorrectTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_4); + assert.fail(message.fail_exchangeRates_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_2); + console.log(message.pass_exchangeRates_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without token address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let TOKEN_LIST = []; + + let requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_5); + assert.fail(message.fail_exchangeRates_5); + } catch (e) { + let error = e.message; + if (error.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_3); + console.log(message.pass_exchangeRates_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + chainId: Number(randomInvalidChainId), + }; + + await dataService.fetchExchangeRates(requestPayload); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_4); + console.log(message.pass_exchangeRates_4); + } else { + console.error(e); + const eString = e.toString(); 
+ addContext(test, eString); + assert.fail(message.fail_exchangeRates_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange Rates without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let TOKEN_LIST; + let requestPayload; + + try { + TOKEN_LIST = [randomTokenAddress, randomTokenAddressUsdt]; + + requestPayload = { + tokens: TOKEN_LIST, + }; + + await dataService.fetchExchangeRates(requestPayload); + + addContext(test, message.fail_exchangeRates_7); + assert.fail(message.fail_exchangeRates_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.property_undefined)) { + addContext(test, message.pass_exchangeRates_5); + console.log(message.pass_exchangeRates_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeRates_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeRates_insufficientBalance); + console.warn(message.exchangeRates_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/listAndRates/sepolia.spec.js b/test/specs/testnet/listAndRates/sepolia.spec.js deleted file mode 100644 index 02763e9..0000000 --- a/test/specs/testnet/listAndRates/sepolia.spec.js +++ /dev/null @@ -1,849 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils } from '@etherspot/prime-sdk'; -import { utils } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let sepoliaTestNetSdk; -let sepoliaEtherspotWalletAddress; -let sepoliaNativeAddress = null; -let sepoliaDataService; -let runTest; - -describe('The PrimeSDK, when get the NFT List, Token List and Exchange Rates details with sepolia network on the TestNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - sepoliaTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.sepolia_chainid), - } - ); - - try { - assert.strictEqual( - sepoliaTestNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - sepoliaEtherspotWalletAddress = - await sepoliaTestNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - sepoliaEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - 
console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - sepoliaDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await sepoliaDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.sepolia_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === sepoliaNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_sepoliaUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the NFT List on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let nfts; - try { - nfts = await sepoliaDataService.getNftList({ - chainId: Number(data.sepolia_chainid), - account: data.sender, - }); - - if (nfts.items.length > 0) { - addContext(test, message.pass_nft_list_1); - console.log(message.pass_nftList_1); - - try { - assert.isNotEmpty( - nfts.items[0].contractName, - message.vali_nftList_contractName - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].contractAddress, - message.vali_nftList_contractAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].tokenType, - message.vali_nftList_tokenType - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].balance, - message.vali_nftList_balance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].items[0].tokenId, - message.vali_nftList_items_tokenId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - nfts.items[0].items[0].name, - message.vali_nftList_items_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - nfts.items[0].items[0].amount, - message.vali_nftList_items_amount - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_nftList_2); - console.log(message.pass_nftList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_1); - } - }, data.retry); // Retry 
this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Token List on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let tokenLists; - let tokenListTokens; - try { - tokenLists = await sepoliaDataService.getTokenLists({ - chainId: data.sepolia_chainid, - }); - - if (tokenLists.length > 0) { - console.log(message.pass_tokenList_1); - addContext(test, message.pass_tokenList_1); - - try { - assert.isNotEmpty( - tokenLists[0].name, - message.vali_tokenList_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenLists[0].endpoint, - message.vali_tokenList_endpoint - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - - tokenListTokens = await sepoliaDataService.getTokenListTokens({ - chainId: data.sepolia_chainid, - }); - - if (tokenListTokens.length > 0) { - console.log(message.pass_tokenList_3); - addContext(test, message.pass_tokenList_3); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_tokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_tokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_tokenListTokens_symbol - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_tokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_tokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_tokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_4); - console.log(message.pass_tokenList_4); - } - - if (tokenLists.length > 0) { - const { name } = tokenLists[0]; - - tokenListTokens = await sepoliaDataService.getTokenListTokens({ - chainId: data.sepolia_chainid, - name, - }); - - if (tokenListTokens.length > 0) { - addContext(test, message.pass_tokenList_5); - console.log(message.pass_tokenList_5); - - try { - assert.isNotEmpty( - tokenListTokens[0].address, - message.vali_selectedTokenListTokens_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].name, - message.vali_selectedTokenListTokens_name - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].symbol, - message.vali_selectedTokenListTokens_symbol - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].decimals, - message.vali_selectedTokenListTokens_decimals - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - tokenListTokens[0].logoURI, - message.vali_selectedTokenListTokens_logoURI - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - tokenListTokens[0].chainId, - message.vali_selectedTokenListTokens_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.pass_tokenList_6); - console.log(message.pass_tokenList_6); - } - } else { - addContext(test, message.pass_tokenList_2); - console.log(message.pass_tokenList_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_tokenList_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.tokenList_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the Exchange Rates on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let TOKEN_LIST; - let rates; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_sepoliaUSDC, - data.tokenAddress_sepoliaUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.sepolia_chainid), - }; - - rates = await sepoliaDataService.fetchExchangeRates(requestPayload); - - for (let i = 0; i < rates.items.length; i++) { - try { - assert.isNotEmpty( - rates.items[i].address, - message.vali_exchangeRates_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eth, - message.vali_exchangeRates_eth - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].eur, - message.vali_exchangeRates_eur - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].gbp, - message.vali_exchangeRates_gbp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - rates.items[i].usd, - message.vali_exchangeRates_usd - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with invalid account address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await sepoliaDataService.getNftList({ - chainId: Number(data.sepolia_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_nftList_2); - 
assert.fail(message.fail_nftList_2); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_3); - console.log(message.pass_nftList_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the NFT List with incorrect account address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await sepoliaDataService.getNftList({ - chainId: Number(data.sepolia_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_nftList_3); - assert.fail(message.fail_nftList_3); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.pass_nftList_4); - console.log(message.pass_nftList_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_nftList_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.nftList_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with other token address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.tokenAddress_sepoliaUSDC, - data.tokenAddress_sepoliaUSDT, - data.tokenAddress_maticUSDC, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.sepolia_chainid), - }; - - await sepoliaDataService.fetchExchangeRates(requestPayload); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid token address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - try { - TOKEN_LIST = [ - data.invalidTokenAddress_sepoliaUSDC, - data.tokenAddress_sepoliaUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.sepolia_chainid), - }; - - await sepoliaDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_3); - assert.fail(message.fail_exchangeRates_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_1); - console.log(message.pass_exchangeRates_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, 
message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with incorrect token address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.incorrectTokenAddress_sepoliaUSDC, - data.tokenAddress_sepoliaUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.sepolia_chainid), - }; - - await sepoliaDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_4); - assert.fail(message.fail_exchangeRates_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_2); - console.log(message.pass_exchangeRates_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without token address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let TOKEN_LIST = []; - - let requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.sepolia_chainid), - }; - - await sepoliaDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_5); - assert.fail(message.fail_exchangeRates_5); - } catch (e) { - let error = e.message; - if (error.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_3); - console.log(message.pass_exchangeRates_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates with invalid chainid on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_sepoliaUSDC, - data.tokenAddress_sepoliaUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - chainId: Number(data.invalid_sepolia_chainid), - }; - - await sepoliaDataService.fetchExchangeRates(requestPayload); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_4); - console.log(message.pass_exchangeRates_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the Exchange Rates without chainid on the sepolia network', async function () { - var test = this; - if (runTest) { - await 
customRetryAsync(async function () { - let TOKEN_LIST; - let requestPayload; - - try { - TOKEN_LIST = [ - data.tokenAddress_sepoliaUSDC, - data.tokenAddress_sepoliaUSDT, - ]; - - requestPayload = { - tokens: TOKEN_LIST, - }; - - await sepoliaDataService.fetchExchangeRates(requestPayload); - - addContext(test, message.fail_exchangeRates_7); - assert.fail(message.fail_exchangeRates_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.property_undefined)) { - addContext(test, message.pass_exchangeRates_5); - console.log(message.pass_exchangeRates_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_exchangeRates_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.exchangeRates_insufficientBalance); - console.warn(message.exchangeRates_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/testnet/paymaster/amoy.spec.js b/test/specs/testnet/paymaster/amoy.spec.js deleted file mode 100644 index ce59848..0000000 --- a/test/specs/testnet/paymaster/amoy.spec.js +++ /dev/null @@ -1,4241 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, ArkaPaymaster } from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let amoyTestNetSdk; -let amoyEtherspotWalletAddress; -let amoyNativeAddress = null; -let amoyDataService; -let arkaPaymaster; -let runTest; - -describe('The PrimeSDK, when transaction with arka and pimlico paymasters with amoy network on the TestNet.', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - amoyTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.amoy_chainid), - } - ); - - try { - assert.strictEqual( - amoyTestNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - amoyEtherspotWalletAddress = - await amoyTestNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - amoyEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - amoyDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // initializating ArkaPaymaster... - try { - arkaPaymaster = new ArkaPaymaster( - Number(data.amoy_chainid), - process.env.API_KEY, - data.paymaster_arka - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_arka_initialize); - } - - // validate the balance of the wallet - try { - let output = await amoyDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.amoy_chainid), - }); - let native_balance; - let link_balance; - let native_final; - let link_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === amoyNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_amoyLink) { - link_balance = output.items[i].balance; - link_final = utils.formatUnits(link_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - link_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token on arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await amoyTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: 
`https://arka.etherspot.io?apiKey=${ - process.env.API_KEY - }&chainId=${Number(data.amoy_chainid)}`, - context: { mode: 'sponsor' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await amoyTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer token with arka pimlico paymaster on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - 
helper.wait(data.mediumTimeout); - - let balance; - // get balance of the account address - try { - balance = await amoyTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /** - * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend - * from the paymaster address on behalf of you. - */ - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - let uoHash1; - let transactionBatch; - let op; - let uoHash; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - try { - assert.isNotEmpty( - paymasterAddress, - message.vali_pimlico_paymasterAddress_1 - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - let contract; - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - - contract = await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - - try { - assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(contract.data, message.vali_erc20Contract_data); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get estimation of transaction - try { - approveOp = await amoyTestNetSdk.estimate(); - - try { - assert.isNotEmpty( - approveOp.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - uoHash1 = await amoyTestNetSdk.send(approveOp); - - try { - assert.isNotEmpty(uoHash1, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // get transaction hash... 
- try { - console.log('Waiting for transaction...'); - let userOpsReceipt1 = null; - const timeout1 = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt1 == null && Date.now() < timeout1) { - helper.wait(data.mediumTimeout); - userOpsReceipt1 = await amoyTestNetSdk.getUserOpReceipt(uoHash1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await amoyTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { token: data.link_token, mode: 'erc20' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - 
message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await amoyTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } else { - addContext(test, message.fail_paymasterAddress_1); - assert.fail(message.fail_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - let transactionBatch; - let op; - let uoHash; - - // get balance of the account address - try { - balance = await amoyTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get 
balance of the account address - try { - balance = await amoyTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // 
sign the UserOp and sending to the bundler... - try { - uoHash = await amoyTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the metadata of the arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the metadata - try { - let metadata = await arkaPaymaster.metadata(); - - try { - assert.isNotEmpty( - metadata.sponsorAddress, - message.vali_metadata_sponsorAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.sponsorWalletBalance, - message.vali_metadata_sponsorWalletBalance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.chainsSupported, - message.vali_metadata_chainsSupported - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.tokenPaymasters, - message.vali_metadata_tokenPaymasters - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_metadata_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the get token paymaster address function of the arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the get token paymaster address - try { - let getTokenPaymasterAddress = - await arkaPaymaster.getTokenPaymasterAddress('LINK'); - - try { - assert.isNotEmpty( - getTokenPaymasterAddress, - message.vali_getTokenPaymasterAddress_tokenPaymasterAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the remove whitelist address function of the arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the remove whitelist address - try { - let removeWhitelist = await arkaPaymaster.removeWhitelist([ - data.sender, - ]); - - if 
(removeWhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the add whitelist address function of the arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the add whitelist address - try { - let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); - - if (addWhitelist.includes(constant.add_whitelist_1)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the check whitelist function of the arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the whilelist address - try { - let checkWhitelist = await arkaPaymaster.checkWhitelist(data.sender); - - if (checkWhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkWhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the deposit function of the arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the deposit - try { - let deposit = await arkaPaymaster.deposit(data.value); - - if (deposit.includes(constant.deposit_1)) { - addContext(test, message.vali_deposit_1); - console.log(message.vali_deposit_1); - } else { - addContext(test, 
message.fail_deposit_1); - assert.fail(message.fail_deposit_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid arka paymaster url on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: data.invalid_paymaster_arka, // invalid URL - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - if (e.message === constant.not_found) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INVALID_API_KEY, - 
context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INCORRECT_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_5); - assert.fail(message.fail_estimateTransaction_5); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_4); - console.log(message.vali_estimateTransaction_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without API Key of arka paymaster on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - // without api_key - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the amoy network', async function () { - var test = this; - const invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (paymasterAddress.includes(constant.not_found)) { - addContext(test, message.vali_pimlico_paymasterAddress_2); - console.log(message.vali_pimlico_paymasterAddress_2); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_2); - assert.fail(message.fail_pimlico_paymasterAddress_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_3); - 
console.log(message.vali_pimlico_paymasterAddress_3); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_3); - assert.fail(message.fail_pimlico_paymasterAddress_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?chainId=${Number(data.amoy_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_4); - console.log(message.vali_pimlico_paymasterAddress_4); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_4); - assert.fail(message.fail_pimlico_paymasterAddress_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_amoy_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_3) { - addContext(test, message.vali_pimlico_paymasterAddress_5); - console.log(message.vali_pimlico_paymasterAddress_5); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_5); - assert.fail(message.fail_pimlico_paymasterAddress_5); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID 
- if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - // without chainid in queryString - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_6); - console.log(message.vali_pimlico_paymasterAddress_6); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_6); - assert.fail(message.fail_pimlico_paymasterAddress_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.invalidEntryPointAddress, // invalid entry point address - { token: data.link_token }, - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - const errorMessage = returnedValue.error; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_pimlico_paymasterAddress_7); - console.log(message.vali_pimlico_paymasterAddress_7); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_7); - assert.fail(message.fail_pimlico_paymasterAddress_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: data.link_token }, // invalid token - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_1) { - 
addContext(test, message.vali_pimlico_paymasterAddress_8); - console.log(message.vali_pimlico_paymasterAddress_8); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_8); - assert.fail(message.fail_pimlico_paymasterAddress_8); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [], // without parametets - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_9); - console.log(message.vali_pimlico_paymasterAddress_9); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_9); - assert.fail(message.fail_pimlico_paymasterAddress_9); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.incorrectTokenAddress_amoyLink, // incorrect token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_1); - console.log(message.vali_erc20Contract_1); - } else { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_erc20Contract_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.invalidTokenAddress_amoyLink, // invalid token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_2); - console.log(message.vali_erc20Contract_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 
Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Contract_3); - console.log(message.vali_erc20Contract_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_5); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Contract_4); - console.log(message.vali_erc20Contract_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let 
returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await amoyTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await amoyTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.invalidValue), - }); - - addContext(test, message.fail_addTransaction_2); - assert.fail(message.fail_addTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_value_1)) { - addContext(test, vali_addTransaction_1); - console.log(vali_addTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - 
body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await amoyTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await amoyTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { token: data.link_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.amoy_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let 
erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await amoyTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await amoyTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.link_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - let 
invalid_queryString = `?chainId=${Number(data.amoy_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await amoyTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await amoyTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.link_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on 
the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_amoy_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await amoyTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await amoyTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.link_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, 
message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.link_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_amoyLink, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_amoyLink, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await amoyTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await amoyTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.link_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - } else { - 
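For reference, the pimlico erc20 negative cases in this block differ only in how the Arka query string is built (dropping or corrupting apiKey / chainId) and in which error constant the estimate is expected to raise. A minimal sketch of that pattern, assuming an already initialized PrimeSdk instance (`sdk`) and the same fixtures as the spec; `buildArkaUrl` and `expectEstimateToFailWith` are illustrative helper names, not part of the SDK:

// Compose an Arka paymaster URL, optionally omitting apiKey or chainId to
// reproduce the negative cases above.
const buildArkaUrl = (base, { apiKey, chainId } = {}) => {
  const params = new URLSearchParams();
  if (apiKey) params.append('apiKey', apiKey);
  if (chainId) params.append('chainId', String(chainId));
  return `${base}?${params.toString()}`;
};

// Expect the erc20-mode estimate to fail with a specific error substring.
async function expectEstimateToFailWith(sdk, url, token, expectedError) {
  try {
    await sdk.estimate({
      paymasterDetails: { url, context: { token, mode: 'erc20' } },
    });
    throw new Error('estimate unexpectedly succeeded');
  } catch (e) {
    if (!e.message.includes(expectedError)) throw e; // unexpected failure, rethrow
  }
}

Each xit above is then essentially one call to this helper with a different malformed URL and the matching constant (invalid_apiKey, invalid_network_3, invalid_data).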
addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the amoy network', async function () { - var test = this; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.amoy_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.amoy_chainid)}`; // invalid API Key in 
queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?chainId=${Number(data.amoy_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_amoy_chainid - )}`; // invalid ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the 
paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the amoy network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await amoyTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await amoyTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await amoyTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await amoyTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, 
message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(data.link_token); - - addContext(test, message.fail_getTokenPaymasterAddress_2); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_1)) { - addContext(test, message.vali_getTokenPaymasterAddress_1); - console.log(message.vali_getTokenPaymasterAddress_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(); - - addContext(test, message.fail_getTokenPaymasterAddress_3); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_getTokenPaymasterAddress_2); - console.log(message.vali_getTokenPaymasterAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.invalidSender]); - - addContext(test, message.fail_removeWhitelist_3); - assert.fail(message.fail_removeWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_3); - console.log(message.vali_removeWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - 
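The validUntil / validAfter cases above all exercise the same sponsor-mode expiry window described in the inline comment (values in epoch milliseconds, roughly a 10-minute default from the send call when omitted, ignored by token paymasters). A minimal sketch of that call shape, assuming an initialized PrimeSdk instance (`sdk`) and an Arka URL carrying a valid apiKey and chainId; `estimateSponsoredWithExpiry` is an illustrative name:

// Both timestamps are in milliseconds; 100 minutes matches the 6,000,000 ms
// offset used by the tests. The window only applies to mode: 'sponsor'.
async function estimateSponsoredWithExpiry(sdk, arkaUrlWithQuery, minutesValid = 100) {
  const validAfter = Date.now();
  const validUntil = validAfter + minutesValid * 60 * 1000;
  return sdk.estimate({
    paymasterDetails: {
      url: arkaUrlWithQuery,
      context: { mode: 'sponsor', validAfter, validUntil },
    },
  });
}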
console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.incorrectSender]); - - addContext(test, message.fail_removeWhitelist_4); - assert.fail(message.fail_removeWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_4); - console.log(message.vali_removeWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([randomAddress.address]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress.address, - data.sender, - ]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove 
whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // make whitelisted addresses - await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - // remove whitelist addresses - let removewhitelist = await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (removewhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - addContext(test, message.fail_removeWhitelist_6); - assert.fail(message.fail_removeWhitelist_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.invalidSender]); - - addContext(test, message.fail_addWhitelist_3); - assert.fail(message.fail_addWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_3); - console.log(message.vali_addWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the amoy 
network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.incorrectSender]); - - addContext(test, message.fail_addWhitelist_4); - assert.fail(message.fail_addWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_4); - console.log(message.vali_addWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_5); - console.log(message.vali_addWhitelist_5); - } else { - addContext(test, message.fail_addWhitelist_7); - assert.fail(message.fail_addWhitelist_7); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.addWhitelist([ - randomAddress.address, - data.sender, - ]); - - if (addWhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the amoy network', async function () { - var test = this; - if (runTest) { 
- await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // add whitelist addresses - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_2); - assert.fail(message.fail_checkWhitelist_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_1); - console.log(message.vali_checkWhitelist_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_3); - assert.fail(message.fail_checkWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_2); - console.log(message.vali_checkWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let checkwhitelist = await arkaPaymaster.checkWhitelist( - randomAddress.address - ); - 
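Taken together, the whitelist regression cases above cover the add / check / remove lifecycle of the Arka paymaster. A condensed happy-path sketch, assuming the `arkaPaymaster` instance initialized elsewhere in the spec (ArkaPaymaster from '@etherspot/prime-sdk') and ethers v5; the responses appear to be plain strings that the tests match against constants such as add_whitelist_3 and remove_whitelist_2:

import { ethers } from 'ethers';

// Round-trip a freshly generated address through the Arka whitelist.
async function whitelistRoundTrip(arkaPaymaster) {
  const wallet = ethers.Wallet.createRandom();

  const added = await arkaPaymaster.addWhitelist([wallet.address]);
  console.log('addWhitelist:', added);

  const status = await arkaPaymaster.checkWhitelist(wallet.address);
  console.log('checkWhitelist:', status);

  const removed = await arkaPaymaster.removeWhitelist([wallet.address]);
  console.log('removeWhitelist:', removed);
}

The negative cases then reuse the same calls with invalid, incorrect, or never-whitelisted addresses and assert on the corresponding error constants.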
- if (checkwhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkwhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster without address on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(); - - addContext(test, message.fail_checkWhitelist_5); - assert.fail(message.fail_checkWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_3); - console.log(message.vali_checkWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the amoy network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the deposit - try { - await arkaPaymaster.deposit('one'); - - addContext(test, message.fail_deposit_3); - assert.fail(message.fail_deposit_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_deposit_2); - console.log(message.vali_deposit_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/testnet/paymaster/newWallet_paymaster.spec.js b/test/specs/testnet/paymaster/newWallet_paymaster.spec.js new file mode 100644 index 0000000..2272c54 --- /dev/null +++ b/test/specs/testnet/paymaster/newWallet_paymaster.spec.js @@ -0,0 +1,4564 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { + PrimeSdk, + DataUtils, + EtherspotBundler, + ArkaPaymaster, +} from '@etherspot/prime-sdk'; +import { ethers, utils } from 'ethers'; +import { assert } from 'chai'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomTokenAddress, + randomTokenName, +} from '../../../utils/sharedData_testnet.js'; +import helper from '../../../utils/helper.js'; +import 
data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk; +let nativeAddress = null; +let dataService; +let arkaPaymaster; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Perform the transaction with arka and pimlico paymasters on the TestNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Perform the transfer native token on arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + 
message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `https://arka.etherspot.io?apiKey=${ + process.env.API_KEY_ARKA + }&chainId=${Number(randomChainId)}`, + context: { mode: 'sponsor' }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); 
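The runTest gate computed in the beforeEach above can be read as one balance check against the configured minima. A minimal sketch of that logic, assuming the same DataUtils service and fixture fields (nativeAddress is null for the chain's native asset, USDC-style tokens use 6 decimals); `hasMinimumBalances` and the `minima` object are illustrative:

import { utils } from 'ethers';

// Returns true when the wallet holds at least the configured native and
// token balances on the selected chain.
async function hasMinimumBalances(dataService, account, chainId, tokenAddress, minima) {
  const { items } = await dataService.getAccountBalances({ account, chainId });

  let nativeBalance = 0;
  let tokenBalance = 0;
  for (const item of items) {
    if (item.token === null) {
      nativeBalance = Number(utils.formatUnits(item.balance, 18));
    } else if (item.token === tokenAddress) {
      tokenBalance = Number(utils.formatUnits(item.balance, 6));
    }
  }
  return nativeBalance > minima.native && tokenBalance > minima.token;
}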
+ addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer token with arka pimlico paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY_ARKA}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.longTimeout); + + let balance; + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /** + * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend + * from the paymaster address on behalf of you. 
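Condensed, that one-time approval flow looks roughly as follows; a sketch under the same assumptions as the spec (global fetch, ERC20_ABI from the Prime SDK helpers, ethers v5, an initialized PrimeSdk instance `sdk`), with `approvePimlicoPaymaster` as an illustrative name:

import { ethers, utils } from 'ethers';
import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js';

// Fetch the token-specific Pimlico paymaster address from Arka, then approve
// it to spend the fee token on the account's behalf (needed once per token).
async function approvePimlicoPaymaster(sdk, arkaUrl, queryString, entryPoint, tokenName, tokenAddress) {
  const res = await fetch(`${arkaUrl}/pimlicoAddress${queryString}`, {
    method: 'POST',
    headers: { Accept: 'application/json', 'Content-Type': 'application/json' },
    body: JSON.stringify({ params: [entryPoint, { token: tokenName }] }),
  });
  const paymasterAddress = (await res.json()).message;
  if (!utils.isAddress(paymasterAddress)) throw new Error('no paymaster address returned');

  // Encode approve(paymaster, MaxUint256) and submit it as a UserOp.
  const erc20 = new ethers.Contract(tokenAddress, ERC20_ABI);
  const callData = erc20.interface.encodeFunctionData('approve', [
    paymasterAddress,
    ethers.constants.MaxUint256,
  ]);

  await sdk.clearUserOpsFromBatch();
  await sdk.addUserOpsToBatch({ to: tokenAddress, data: callData });
  const approveOp = await sdk.estimate();
  return sdk.send(approveOp); // UserOp hash of the approval
}

Only after this approval is mined does the test estimate the actual transfer with context { token, mode: 'erc20' } so the fee is charged in the ERC-20 token.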
+ */ + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + let uoHash1; + let transactionBatch; + let op; + let uoHash; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + try { + assert.isNotEmpty( + paymasterAddress, + message.vali_pimlico_paymasterAddress_1 + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + let contract; + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + + contract = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + + try { + assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + contract.data, + message.vali_erc20Contract_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get estimation of transaction + try { + approveOp = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + approveOp.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + uoHash1 = await testnetPrimeSdk.send(approveOp); + + try { + assert.isNotEmpty( + uoHash1, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... + console.log('Waiting for transaction...'); + let userOpsReceipt1 = null; + const timeout1 = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt1 == null && Date.now() < timeout1) { + helper.wait(data.mediumTimeout); + userOpsReceipt1 = await testnetPrimeSdk.getUserOpReceipt(uoHash1); + } + + // wait for the execution + helper.wait(data.longTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender 
+ ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... 
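The receipt-polling loop used earlier in this flow (waiting for the approval UserOp before sending the fee-paid transfer) can be isolated as a small helper. A minimal sketch assuming an initialized PrimeSdk instance (`sdk`) exposing getUserOpReceipt; `waitForUserOpReceipt` and `sleep` are illustrative names:

// Poll getUserOpReceipt until the bundler reports inclusion or the timeout passes.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function waitForUserOpReceipt(sdk, uoHash, timeoutMs = 60000, intervalMs = 5000) {
  const deadline = Date.now() + timeoutMs;
  let receipt = null;
  while (receipt == null && Date.now() < deadline) {
    await sleep(intervalMs);
    receipt = await sdk.getUserOpReceipt(uoHash);
  }
  return receipt; // null if not mined within the timeout
}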
+ try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty( + uoHash, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + } else { + addContext(test, message.fail_paymasterAddress_1); + assert.fail(message.fail_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let balance; + let transactionBatch; + let op; + let uoHash; + + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor 
transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... 
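+ // NOTE: validUntil above is now + 6,000,000 ms (~100 minutes), so the sponsor paymaster + // data produced by estimate() is still within its validity window when send() is called.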
+ try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the metadata of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the metadata + try { + let metadata = await arkaPaymaster.metadata(); + + try { + assert.isNotEmpty( + metadata.sponsorAddress, + message.vali_metadata_sponsorAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.sponsorWalletBalance, + message.vali_metadata_sponsorWalletBalance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.chainsSupported, + message.vali_metadata_chainsSupported + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.tokenPaymasters, + message.vali_metadata_tokenPaymasters + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the get token paymaster address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the get token paymaster address + try { + let getTokenPaymasterAddress = + await arkaPaymaster.getTokenPaymasterAddress('USDC'); + + try { + assert.isNotEmpty( + getTokenPaymasterAddress, + message.vali_getTokenPaymasterAddress_tokenPaymasterAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the remove whitelist address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + 
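// removeWhitelist() returns a message string: the test passes when the response contains + // constant.remove_whitelist_2, or when the call throws an error containing + // constant.remove_whitelist_1, which is also treated as a valid outcome. +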
helper.wait(data.mediumTimeout); + + // validate the remove whitelist address + try { + let removeWhitelist = await arkaPaymaster.removeWhitelist([ + data.sender, + ]); + + if (removeWhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the add whitelist address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the add whitelist address + try { + let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); + + if (addWhitelist.includes(constant.add_whitelist_1)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the check whitelist function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the whilelist address + try { + let checkWhitelist = await arkaPaymaster.checkWhitelist( + data.sender + ); + + if (checkWhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkWhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the deposit function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + 
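// deposit() is expected to fund the arka sponsor with data.value of the native token; + // a successful call returns a message containing constant.deposit_1. +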
helper.wait(data.mediumTimeout); + + // validate the deposit + try { + let deposit = await arkaPaymaster.deposit(data.value); + + if (deposit.includes(constant.deposit_1)) { + addContext(test, message.vali_deposit_1); + console.log(message.vali_deposit_1); + } else { + addContext(test, message.fail_deposit_1); + assert.fail(message.fail_deposit_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid arka paymaster url on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.invalid_paymaster_arka, // invalid URL + api_key: process.env.API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + if (e.message === constant.not_found) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } 
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INVALID_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INCORRECT_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_5); + assert.fail(message.fail_estimateTransaction_5); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_4); + console.log(message.vali_estimateTransaction_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await 
testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + // without api_key + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + const invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + if (paymasterAddress.includes(constant.not_found_2)) { + addContext(test, message.vali_pimlico_paymasterAddress_2); + console.log(message.vali_pimlico_paymasterAddress_2); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_2); + assert.fail(message.fail_pimlico_paymasterAddress_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( + randomChainId + )}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 
'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_3); + console.log(message.vali_pimlico_paymasterAddress_3); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_3); + assert.fail(message.fail_pimlico_paymasterAddress_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_4); + console.log(message.vali_pimlico_paymasterAddress_4); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_4); + assert.fail(message.fail_pimlico_paymasterAddress_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_3) { + addContext(test, message.vali_pimlico_paymasterAddress_5); + console.log(message.vali_pimlico_paymasterAddress_5); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_5); + 
assert.fail(message.fail_pimlico_paymasterAddress_5); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + // without chainid in queryString + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_data) { + addContext(test, message.vali_pimlico_paymasterAddress_6); + console.log(message.vali_pimlico_paymasterAddress_6); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_6); + assert.fail(message.fail_pimlico_paymasterAddress_6); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [ + data.invalidEntryPointAddress, // invalid entry point address + { token: randomTokenName }, + ], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + const errorMessage = returnedValue.error; + if (errorMessage.includes(constant.invalid_address_4)) { + addContext(test, message.vali_pimlico_paymasterAddress_7); + console.log(message.vali_pimlico_paymasterAddress_7); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_7); + assert.fail(message.fail_pimlico_paymasterAddress_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await 
customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [ + data.entryPointAddress, + { token: data.invalid_usdc_token }, // invalid token + ], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_1) { + addContext(test, message.vali_pimlico_paymasterAddress_8); + console.log(message.vali_pimlico_paymasterAddress_8); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_8); + assert.fail(message.fail_pimlico_paymasterAddress_8); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [], // without parametets + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_data) { + addContext(test, message.vali_pimlico_paymasterAddress_9); + console.log(message.vali_pimlico_paymasterAddress_9); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_9); + assert.fail(message.fail_pimlico_paymasterAddress_9); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } 
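+ // the /pimlicoAddress response carries the token paymaster address in `message`; the + // approve calldata below is only built once that value parses as a valid address.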
+ + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_1); + console.log(message.vali_erc20Contract_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_3); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_2); + console.log(message.vali_erc20Contract_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if 
(runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Contract_3); + console.log(message.vali_erc20Contract_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_5); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Contract_4); + console.log(message.vali_erc20Contract_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Contract_6); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.invalidValue), + }); + + addContext(test, message.fail_addTransaction_2); + assert.fail(message.fail_addTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_value_1)) { + addContext(test, message.vali_addTransaction_1); + console.log(message.vali_addTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_3); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry
this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + 
console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await 
testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await 
testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, 
message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + 
const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + 
paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + 
addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await 
testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress( + data.invalid_usdc_token + ); + + addContext(test, message.fail_getTokenPaymasterAddress_2); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_1)) { + addContext(test, message.vali_getTokenPaymasterAddress_1); + console.log(message.vali_getTokenPaymasterAddress_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + 
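+  /*
+   * Editor's sketch (illustrative only, not part of the original suite): the
+   * sponsor-mode tests above describe validUntil / validAfter as optional
+   * millisecond timestamps that default to roughly 10 minutes of validity from
+   * the send call. Assuming the same testnetPrimeSdk instance and an
+   * arka_url / queryString pair declared as in the surrounding tests, the
+   * values could be built and passed like this:
+   *
+   *   const validAfter = Date.now();                    // effective immediately (ms)
+   *   const validUntil = validAfter + 100 * 60 * 1000;  // expires 100 minutes later (ms)
+   *   const op = await testnetPrimeSdk.estimate({
+   *     paymasterDetails: {
+   *       url: `${arka_url}${queryString}`,
+   *       context: { mode: 'sponsor', validAfter, validUntil },
+   *     },
+   *   });
+   *
+   * Both fields are relevant only for sponsor transactions and are ignored by
+   * the token (erc20) paymaster flow.
+   */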
xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress(); + + addContext(test, message.fail_getTokenPaymasterAddress_3); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_getTokenPaymasterAddress_2); + console.log(message.vali_getTokenPaymasterAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.invalidSender]); + + addContext(test, message.fail_removeWhitelist_3); + assert.fail(message.fail_removeWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_3); + console.log(message.vali_removeWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.incorrectSender]); + + addContext(test, message.fail_removeWhitelist_4); + assert.fail(message.fail_removeWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_4); + console.log(message.vali_removeWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await 
arkaPaymaster.removeWhitelist([randomAddress.address]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress.address, + data.sender, + ]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // make whitelisted addresses + await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + // remove whitelist addresses + let removewhitelist = await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (removewhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with 
multiple random addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + addContext(test, message.fail_removeWhitelist_6); + assert.fail(message.fail_removeWhitelist_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.invalidSender]); + + addContext(test, message.fail_addWhitelist_3); + assert.fail(message.fail_addWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_3); + console.log(message.vali_addWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.incorrectSender]); + + addContext(test, message.fail_addWhitelist_4); + assert.fail(message.fail_addWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_4); + console.log(message.vali_addWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await 
arkaPaymaster.addWhitelist([ + randomAddress.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_5); + console.log(message.vali_addWhitelist_5); + } else { + addContext(test, message.fail_addWhitelist_7); + assert.fail(message.fail_addWhitelist_7); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress.address, + data.sender, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // add whitelist addresses + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); 
+ } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whilelist address + try { + await arkaPaymaster.checkWhitelist(data.invalidSender); + + addContext(test, message.fail_checkWhitelist_2); + assert.fail(message.fail_checkWhitelist_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_1); + console.log(message.vali_checkWhitelist_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whilelist address + try { + await arkaPaymaster.checkWhitelist(data.invalidSender); + + addContext(test, message.fail_checkWhitelist_3); + assert.fail(message.fail_checkWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_2); + console.log(message.vali_checkWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whilelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let checkwhitelist = await arkaPaymaster.checkWhitelist( + randomAddress.address + ); + + if (checkwhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkwhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster without address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whilelist address + try { + await arkaPaymaster.checkWhitelist(); + + addContext(test, 
message.fail_checkWhitelist_5); + assert.fail(message.fail_checkWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_3); + console.log(message.vali_checkWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the deposit + try { + await arkaPaymaster.deposit('one'); + + addContext(test, message.fail_deposit_3); + assert.fail(message.fail_deposit_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_2); + console.log(message.vali_deposit_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/paymaster/oldWallet_paymaster.spec.js b/test/specs/testnet/paymaster/oldWallet_paymaster.spec.js new file mode 100644 index 0000000..1fc85a2 --- /dev/null +++ b/test/specs/testnet/paymaster/oldWallet_paymaster.spec.js @@ -0,0 +1,4596 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { + PrimeSdk, + DataUtils, + EtherspotBundler, + ArkaPaymaster, +} from '@etherspot/prime-sdk'; +import { ethers, utils } from 'ethers'; +import { assert } from 'chai'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidChainId, + randomInvalidTokenAddress, + randomTokenAddress, + randomTokenName, +} from '../../../utils/sharedData_testnet.js'; + +let testnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let arkaPaymaster; +let runTest; + +describe('Perform the transaction with arka and pimlico paymasters on the TestNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + testnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + 
message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await testnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Perform the transfer native token on arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); 
+ assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `https://arka.etherspot.io?apiKey=${ + process.env.API_KEY_ARKA + }&chainId=${Number(randomChainId)}`, + context: { mode: 'sponsor' }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = 
e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer token with arka pimlico paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY_ARKA}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.longTimeout); + + let balance; + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /** + * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend + * from the paymaster address on behalf of you. + */ + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + let uoHash1; + let transactionBatch; + let op; + let uoHash; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + try { + assert.isNotEmpty( + paymasterAddress, + message.vali_pimlico_paymasterAddress_1 + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + let contract; + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + + contract = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + + try { + assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + contract.data, + message.vali_erc20Contract_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get estimation of transaction + try { + approveOp = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + approveOp.sender, + 
message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + approveOp.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + uoHash1 = await testnetPrimeSdk.send(approveOp); + + try { + assert.isNotEmpty( + uoHash1, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... 
+ console.log('Waiting for transaction...'); + let userOpsReceipt1 = null; + const timeout1 = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt1 == null && Date.now() < timeout1) { + helper.wait(data.mediumTimeout); + userOpsReceipt1 = await testnetPrimeSdk.getUserOpReceipt(uoHash1); + } + + // wait for the execution + helper.wait(data.longTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty( + uoHash, + message.vali_submitTransaction_uoHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + } else { + addContext(test, message.fail_paymasterAddress_1); + assert.fail(message.fail_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let balance; + let transactionBatch; + let op; + let uoHash; + + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + 
const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + op = await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler... + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the metadata of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the metadata + try { + let metadata = await arkaPaymaster.metadata(); + + try { + assert.isNotEmpty( + metadata.sponsorAddress, + message.vali_metadata_sponsorAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.sponsorWalletBalance, + message.vali_metadata_sponsorWalletBalance + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.chainsSupported, + message.vali_metadata_chainsSupported + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + metadata.tokenPaymasters, + message.vali_metadata_tokenPaymasters + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the get token paymaster address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the get token paymaster address + try { + let getTokenPaymasterAddress = + await arkaPaymaster.getTokenPaymasterAddress('USDC'); + + try { + assert.isNotEmpty( + getTokenPaymasterAddress, + message.vali_getTokenPaymasterAddress_tokenPaymasterAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the remove whitelist address 
function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the remove whitelist address + try { + let removeWhitelist = await arkaPaymaster.removeWhitelist([ + data.sender, + ]); + + if (removeWhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the add whitelist address function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the add whitelist address + try { + let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); + + if (addWhitelist.includes(constant.add_whitelist_1)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the check whitelist function of the arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the whilelist address + try { + let checkWhitelist = await arkaPaymaster.checkWhitelist( + data.sender + ); + + if (checkWhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkWhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the deposit function of the 
arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // validate the deposit + try { + let deposit = await arkaPaymaster.deposit(data.value); + + if (deposit.includes(constant.deposit_1)) { + addContext(test, message.vali_deposit_1); + console.log(message.vali_deposit_1); + } else { + addContext(test, message.fail_deposit_1); + assert.fail(message.fail_deposit_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid arka paymaster url on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.invalid_paymaster_arka, // invalid URL + api_key: process.env.API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + if (e.message === constant.not_found) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INVALID_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + api_key: process.env.INCORRECT_API_KEY, + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_5); + assert.fail(message.fail_estimateTransaction_5); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_4); + console.log(message.vali_estimateTransaction_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without API Key of arka paymaster on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: data.paymaster_arka, + // without api_key + context: { mode: 'sponsor' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + const invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + + if (paymasterAddress.includes(constant.not_found_2)) { + addContext(test, message.vali_pimlico_paymasterAddress_2); + console.log(message.vali_pimlico_paymasterAddress_2); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_2); + assert.fail(message.fail_pimlico_paymasterAddress_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( + randomChainId + )}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let 
returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_3); + console.log(message.vali_pimlico_paymasterAddress_3); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_3); + assert.fail(message.fail_pimlico_paymasterAddress_3); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_apiKey) { + addContext(test, message.vali_pimlico_paymasterAddress_4); + console.log(message.vali_pimlico_paymasterAddress_4); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_4); + assert.fail(message.fail_pimlico_paymasterAddress_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_3) { + addContext(test, 
message.vali_pimlico_paymasterAddress_5);
+            console.log(message.vali_pimlico_paymasterAddress_5);
+          } else {
+            addContext(test, message.fail_pimlico_paymasterAddress_5);
+            assert.fail(message.fail_pimlico_paymasterAddress_5);
+          }
+        }, data.retry); // Retry this async test up to 5 times
+      } else {
+        addContext(test, message.nativeTransaction_insufficientBalance);
+        console.warn(message.pimlocoPaymaster_insufficientBalance);
+        test.skip();
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the ' +
+      randomChainName +
+      ' network',
+    async function () {
+      var test = this;
+      let arka_url = data.paymaster_arka;
+      let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID
+      if (runTest) {
+        await customRetryAsync(async function () {
+          let returnedValue;
+          try {
+            returnedValue = await fetch(
+              `${arka_url}/pimlicoAddress${queryString}`,
+              {
+                // without chainid in queryString
+                method: 'POST',
+                headers: {
+                  Accept: 'application/json',
+                  'Content-Type': 'application/json',
+                },
+                body: JSON.stringify({
+                  params: [data.entryPointAddress, { token: randomTokenName }],
+                }),
+              }
+            ).then((res) => {
+              return res.json();
+            });
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_pimlico_paymasterAddress_1);
+          }
+
+          if (returnedValue.error === constant.invalid_data) {
+            addContext(test, message.vali_pimlico_paymasterAddress_6);
+            console.log(message.vali_pimlico_paymasterAddress_6);
+          } else {
+            addContext(test, message.fail_pimlico_paymasterAddress_6);
+            assert.fail(message.fail_pimlico_paymasterAddress_6);
+          }
+        }, data.retry); // Retry this async test up to 5 times
+      } else {
+        addContext(test, message.nativeTransaction_insufficientBalance);
+        console.warn(message.pimlocoPaymaster_insufficientBalance);
+        test.skip();
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the ' +
+      randomChainName +
+      ' network',
+    async function () {
+      var test = this;
+      let arka_url = data.paymaster_arka;
+      let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number(
+        randomChainId
+      )}`;
+      if (runTest) {
+        await customRetryAsync(async function () {
+          let returnedValue;
+
+          try {
+            returnedValue = await fetch(
+              `${arka_url}/pimlicoAddress${queryString}`,
+              {
+                method: 'POST',
+                headers: {
+                  Accept: 'application/json',
+                  'Content-Type': 'application/json',
+                },
+                body: JSON.stringify({
+                  params: [
+                    data.invalidEntryPointAddress, // invalid entry point address
+                    { token: randomTokenName },
+                  ],
+                }),
+              }
+            ).then((res) => {
+              return res.json();
+            });
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_pimlico_paymasterAddress_1);
+          }
+
+          // validate the error returned for the invalid entry point address
+          const errorMessage = returnedValue.error;
+          if (errorMessage.includes(constant.invalid_address_4)) {
+            addContext(test, message.vali_pimlico_paymasterAddress_7);
+            console.log(message.vali_pimlico_paymasterAddress_7);
+          } else {
+            addContext(test, message.fail_pimlico_paymasterAddress_7);
+            assert.fail(message.fail_pimlico_paymasterAddress_7);
+          }
+        }, data.retry); // Retry this async test up to 5 times
+      } else {
+        addContext(test, message.nativeTransaction_insufficientBalance);
+        console.warn(message.pimlocoPaymaster_insufficientBalance);
+        test.skip();
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the ' +
+      randomChainName +
+      ' network',
+    async function () {
+      var test = this;
+ let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [ + data.entryPointAddress, + { token: data.invalid_usdc_token }, // invalid token + ], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_network_1) { + addContext(test, message.vali_pimlico_paymasterAddress_8); + console.log(message.vali_pimlico_paymasterAddress_8); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_8); + assert.fail(message.fail_pimlico_paymasterAddress_8); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [], // without parametets + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (returnedValue.error === constant.invalid_data) { + addContext(test, message.vali_pimlico_paymasterAddress_9); + console.log(message.vali_pimlico_paymasterAddress_9); + } else { + addContext(test, message.fail_pimlico_paymasterAddress_9); + assert.fail(message.fail_pimlico_paymasterAddress_9); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + 
} catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_1); + console.log(message.vali_erc20Contract_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_3); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.contract_address_1)) { + addContext(test, message.vali_erc20Contract_2); + console.log(message.vali_erc20Contract_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var 
test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Contract_3); + console.log(message.vali_erc20Contract_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_5); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + paymasterAddress = returnedValue.message; + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Contract_4); + 
console.log(message.vali_erc20Contract_4);
+            } else {
+              console.error(e);
+              const eString = e.toString();
+              addContext(test, eString);
+              assert.fail(message.fail_erc20Contract_6);
+            }
+          }
+        } else {
+          addContext(test, message.fail_erc20Contract_2);
+          assert.fail(message.fail_erc20Contract_2);
+        }
+      }, data.retry); // Retry this async test up to 5 times
+    } else {
+      addContext(test, message.nativeTransaction_insufficientBalance);
+      console.warn(message.pimlocoPaymaster_insufficientBalance);
+      test.skip();
+    }
+  }
+);
+
+  it(
+    'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the ' +
+      randomChainName +
+      ' network',
+    async function () {
+      var test = this;
+      let arka_url = data.paymaster_arka;
+      let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number(
+        randomChainId
+      )}`;
+      if (runTest) {
+        await customRetryAsync(async function () {
+          let returnedValue;
+          let paymasterAddress;
+          let erc20Contract;
+          let encodedData;
+          let approveOp;
+
+          try {
+            returnedValue = await fetch(
+              `${arka_url}/pimlicoAddress${queryString}`,
+              {
+                method: 'POST',
+                headers: {
+                  Accept: 'application/json',
+                  'Content-Type': 'application/json',
+                },
+                body: JSON.stringify({
+                  params: [data.entryPointAddress, { token: randomTokenName }],
+                }),
+              }
+            ).then((res) => {
+              return res.json();
+            });
+
+            paymasterAddress = returnedValue.message;
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_pimlico_paymasterAddress_1);
+          }
+
+          if (utils.isAddress(paymasterAddress)) {
+            // get the erc20 Contract
+            try {
+              erc20Contract = new ethers.Contract(
+                randomTokenAddress,
+                ERC20_ABI
+              );
+              encodedData = erc20Contract.interface.encodeFunctionData(
+                'approve',
+                [paymasterAddress, ethers.constants.MaxUint256]
+              );
+              await testnetPrimeSdk.addUserOpsToBatch({
+                to: randomTokenAddress,
+                data: encodedData,
+              });
+            } catch (e) {
+              console.error(e);
+              const eString = e.toString();
+              addContext(test, eString);
+              assert.fail(message.fail_erc20Contract_1);
+            }
+
+            // get the UserOp Hash
+            try {
+              approveOp = await testnetPrimeSdk.estimate();
+            } catch (e) {
+              console.error(e);
+              const eString = e.toString();
+              addContext(test, eString);
+              assert.fail(message.fail_estimateTransaction_1);
+            }
+
+            // get the uoHash1
+            try {
+              await testnetPrimeSdk.send(approveOp);
+            } catch (e) {
+              console.error(e);
+              const eString = e.toString();
+              if (eString === 'Error') {
+                console.warn(message.skip_transaction_error);
+                addContext(test, message.skip_transaction_error);
+                test.skip();
+              } else {
+                addContext(test, eString);
+                assert.fail(message.fail_submitTransaction_1);
+              }
+            }
+
+            // clear the transaction batch
+            try {
+              await testnetPrimeSdk.clearUserOpsFromBatch();
+            } catch (e) {
+              console.error(e);
+              const eString = e.toString();
+              addContext(test, eString);
+              assert.fail(message.fail_clearTransaction_1);
+            }
+
+            // add transactions to the batch
+            try {
+              await testnetPrimeSdk.addUserOpsToBatch({
+                to: data.recipient,
+                value: ethers.utils.parseEther(data.invalidValue),
+              });
+
+              addContext(test, message.fail_addTransaction_2);
+              assert.fail(message.fail_addTransaction_2);
+            } catch (e) {
+              let errorMessage = e.message;
+              if (errorMessage.includes(constant.invalid_value_1)) {
+                addContext(test, message.vali_addTransaction_1);
+                console.log(message.vali_addTransaction_1);
+              } else {
+                console.error(e);
+                const eString = e.toString();
+                addContext(test, eString);
+                assert.fail(message.fail_addTransaction_3);
+              }
+            }
+          }
else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage 
= e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without Api Key while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_estimateTransaction_5); + console.log(message.vali_estimateTransaction_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_6); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid chainid in queryString + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + 
console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID + if (runTest) { + await customRetryAsync(async function () { + let returnedValue; + let paymasterAddress; + let erc20Contract; + let encodedData; + let approveOp; + + try { + returnedValue = await fetch( + `${arka_url}/pimlicoAddress${queryString}`, + { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + params: [data.entryPointAddress, { token: randomTokenName }], + }), + } + ).then((res) => { + return res.json(); + }); + + paymasterAddress = returnedValue.message; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + if (utils.isAddress(paymasterAddress)) { + // get the erc20 Contract + try { + erc20Contract = new ethers.Contract( + randomTokenAddress, + ERC20_ABI + ); + encodedData = erc20Contract.interface.encodeFunctionData( + 'approve', + [paymasterAddress, ethers.constants.MaxUint256] + ); + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: encodedData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Contract_1); + } + + // get the UserOp Hash + try { + approveOp = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // get the uoHash1 + try { + await testnetPrimeSdk.send(approveOp); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { token: randomTokenName, mode: 'erc20' }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + } else { + addContext(test, message.fail_erc20Contract_2); + assert.fail(message.fail_erc20Contract_2); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let invalid_arka_url = data.invalid_paymaster_arka; + let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomChainId + )}`; + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { 
+ await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${invalid_arka_url}${queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_2); + assert.fail(message.fail_estimateTransaction_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.not_found)) { + addContext(test, message.vali_estimateTransaction_1); + console.log(message.vali_estimateTransaction_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${ + process.env.INVALID_API_KEY + }&chainId=${Number(randomChainId)}`; // invalid API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token 
paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_4); + assert.fail(message.fail_estimateTransaction_4); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?chainId=${Number(randomChainId)}`; // without API Key in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_6); + assert.fail(message.fail_estimateTransaction_6); + } catch (e) { + if (e.message === constant.invalid_apiKey) { + addContext(test, message.vali_estimateTransaction_3); + console.log(message.vali_estimateTransaction_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + assert.fail(message.fail_estimateTransaction_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( + randomInvalidChainId + )}`; // invalid ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_7); + assert.fail(message.fail_estimateTransaction_7); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_3)) { + addContext(test, message.vali_estimateTransaction_6); + console.log(message.vali_estimateTransaction_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let arka_url = data.paymaster_arka; + let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without 
ChainID in queryString + if (runTest) { + await customRetryAsync(async function () { + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlico_paymasterAddress_1); + } + + /* estimate transactions added to the batch and get the fee data for the UserOp + validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds + For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated + validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters + */ + + // estimate transactions added to the batch and get the fee data for the UserOp + try { + await testnetPrimeSdk.estimate({ + paymasterDetails: { + url: `${arka_url}${invalid_queryString}`, + context: { + mode: 'sponsor', + validAfter: new Date().valueOf(), + validUntil: new Date().valueOf() + 6000000, + }, + }, + }); + + addContext(test, message.fail_estimateTransaction_8); + assert.fail(message.fail_estimateTransaction_8); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_estimateTransaction_7); + console.log(message.vali_estimateTransaction_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.pimlocoPaymaster_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress( + data.invalid_usdc_token + ); + + addContext(test, message.fail_getTokenPaymasterAddress_2); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_1)) { + addContext(test, message.vali_getTokenPaymasterAddress_1); + console.log(message.vali_getTokenPaymasterAddress_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + 
addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the get token paymaster address + try { + await arkaPaymaster.getTokenPaymasterAddress(); + + addContext(test, message.fail_getTokenPaymasterAddress_3); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_getTokenPaymasterAddress_2); + console.log(message.vali_getTokenPaymasterAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTokenPaymasterAddress_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.invalidSender]); + + addContext(test, message.fail_removeWhitelist_3); + assert.fail(message.fail_removeWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_3); + console.log(message.vali_removeWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + await arkaPaymaster.removeWhitelist([data.incorrectSender]); + + addContext(test, message.fail_removeWhitelist_4); + assert.fail(message.fail_removeWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_removeWhitelist_4); + console.log(message.vali_removeWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await 
customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([randomAddress.address]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress.address, + data.sender, + ]); + + addContext(test, message.fail_removeWhitelist_5); + assert.fail(message.fail_removeWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // make whitelisted addresses + await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + // remove whitelist addresses + let removewhitelist = await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (removewhitelist.includes(constant.remove_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + } else { + addContext(test, message.fail_removeWhitelist_1); + assert.fail(message.fail_removeWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + 
console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the remove whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + await arkaPaymaster.removeWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + addContext(test, message.fail_removeWhitelist_6); + assert.fail(message.fail_removeWhitelist_6); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.invalidSender]); + + addContext(test, message.fail_addWhitelist_3); + assert.fail(message.fail_addWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_3); + console.log(message.vali_addWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + await arkaPaymaster.addWhitelist([data.incorrectSender]); + + addContext(test, message.fail_addWhitelist_4); + assert.fail(message.fail_addWhitelist_4); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_addWhitelist_4); + console.log(message.vali_addWhitelist_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if 
(runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_5); + console.log(message.vali_addWhitelist_5); + } else { + addContext(test, message.fail_addWhitelist_7); + assert.fail(message.fail_addWhitelist_7); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress.address, + data.sender, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the add whitelist address + try { + const randomAddress1 = ethers.Wallet.createRandom(); + const randomAddress2 = ethers.Wallet.createRandom(); + + // add whitelist addresses + let addwhitelist = await arkaPaymaster.addWhitelist([ + randomAddress1.address, + randomAddress2.address, + ]); + + if (addwhitelist.includes(constant.add_whitelist_3)) { + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + } else { + addContext(test, message.fail_addWhitelist_1); + assert.fail(message.fail_addWhitelist_1); + } + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addWhitelist_2); + } + } + }, data.retry); // 
Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + await arkaPaymaster.checkWhitelist(data.invalidSender); + + addContext(test, message.fail_checkWhitelist_2); + assert.fail(message.fail_checkWhitelist_2); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_1); + console.log(message.vali_checkWhitelist_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + await arkaPaymaster.checkWhitelist(data.incorrectSender); + + addContext(test, message.fail_checkWhitelist_3); + assert.fail(message.fail_checkWhitelist_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_2); + console.log(message.vali_checkWhitelist_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whitelist address + try { + const randomAddress = ethers.Wallet.createRandom(); + let checkwhitelist = await arkaPaymaster.checkWhitelist( + randomAddress.address + ); + + if (checkwhitelist.includes(constant.check_whitelist_1)) { + addContext(test, message.vali_addWhitelist_2); + console.log(message.vali_addWhitelist_2); + } else if (checkwhitelist.includes(constant.check_whitelist_2)) { + addContext(test, message.vali_removeWhitelist_2); + console.log(message.vali_removeWhitelist_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_4); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the check whitelist function of the arka paymaster without address on the ' + + randomChainName + + ' network', + async function () { + 
var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the whilelist address + try { + await arkaPaymaster.checkWhitelist(); + + addContext(test, message.fail_checkWhitelist_5); + assert.fail(message.fail_checkWhitelist_5); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_address_5)) { + addContext(test, message.vali_checkWhitelist_3); + console.log(message.vali_checkWhitelist_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // validate the deposit + try { + await arkaPaymaster.deposit('one'); + + addContext(test, message.fail_deposit_3); + assert.fail(message.fail_deposit_3); + } catch (e) { + let errorMessage = e.message; + if (errorMessage.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_2); + console.log(message.vali_deposit_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.arkaFunction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/paymaster/sepolia.spec.js b/test/specs/testnet/paymaster/sepolia.spec.js deleted file mode 100644 index 53550d2..0000000 --- a/test/specs/testnet/paymaster/sepolia.spec.js +++ /dev/null @@ -1,4242 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils, ArkaPaymaster } from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import addContext from 'mochawesome/addContext.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import helper from '../../../utils/helper.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let sepoliaTestNetSdk; -let sepoliaEtherspotWalletAddress; -let sepoliaNativeAddress = null; -let sepoliaDataService; -let arkaPaymaster; -let runTest; - -describe('The PrimeSDK, when transaction with arka and pimlico paymasters with sepolia network on the TestNet.', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - sepoliaTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.sepolia_chainid), - } - ); - - try { - assert.strictEqual( - sepoliaTestNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - 
const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - sepoliaEtherspotWalletAddress = - await sepoliaTestNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - sepoliaEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... - try { - sepoliaDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // initializating ArkaPaymaster... - try { - arkaPaymaster = new ArkaPaymaster( - Number(data.sepolia_chainid), - process.env.API_KEY, - data.paymaster_arka - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_arka_initialize); - } - - // validate the balance of the wallet - try { - let output = await sepoliaDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.sepolia_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === sepoliaNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_sepoliaUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token on arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `https://arka.etherspot.io?apiKey=${ - process.env.API_KEY - }&chainId=${Number(data.sepolia_chainid)}`, - context: { mode: 'sponsor' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer token with arka pimlico paymaster on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - // get balance of the account address - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /** - * The fetching of pimlico erc20 paymaster address is only required for the first time for each specified gas token since we need to approve the tokens to spend - * from the paymaster address on behalf of you. - */ - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - let uoHash1; - let transactionBatch; - let op; - let uoHash; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - - try { - assert.isNotEmpty( - paymasterAddress, - message.vali_pimlico_paymasterAddress_1 - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - let contract; - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - - contract = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - - try { - assert.isNotEmpty(contract.to, message.vali_erc20Contract_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(contract.data, message.vali_erc20Contract_data); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get estimation of transaction - try { - approveOp = await sepoliaTestNetSdk.estimate(); - - try { - assert.isNotEmpty( - approveOp.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - 
assert.isNotEmpty( - approveOp.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - approveOp.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - uoHash1 = await sepoliaTestNetSdk.send(approveOp); - - try { - assert.isNotEmpty(uoHash1, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // get transaction hash... 
- try { - console.log('Waiting for transaction...'); - let userOpsReceipt1 = null; - const timeout1 = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt1 == null && Date.now() < timeout1) { - helper.wait(data.mediumTimeout); - userOpsReceipt1 = - await sepoliaTestNetSdk.getUserOpReceipt(uoHash1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.nonce, - message.vali_estimateTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await sepoliaTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - } else { - addContext(test, message.fail_paymasterAddress_1); - assert.fail(message.fail_paymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer token with arka paymaster with validUntil and validAfter on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - let balance; - let transactionBatch; - let op; - let uoHash; - - // get balance of the account address - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - op = await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); 
- assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler... - try { - uoHash = await sepoliaTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the metadata of the arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the metadata - try { - let metadata = await arkaPaymaster.metadata(); - - try { - assert.isNotEmpty( - metadata.sponsorAddress, - message.vali_metadata_sponsorAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.sponsorWalletBalance, - message.vali_metadata_sponsorWalletBalance - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.chainsSupported, - message.vali_metadata_chainsSupported - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - metadata.tokenPaymasters, - message.vali_metadata_tokenPaymasters - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_metadata_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the get token paymaster address function of the arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the get token paymaster address - try { - let getTokenPaymasterAddress = - await arkaPaymaster.getTokenPaymasterAddress('USDC'); - - try { - assert.isNotEmpty( - getTokenPaymasterAddress, - message.vali_getTokenPaymasterAddress_tokenPaymasterAddress - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the remove whitelist address function of the arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the remove whitelist address - try { - let removeWhitelist = await arkaPaymaster.removeWhitelist([ - 
data.sender, - ]); - - if (removeWhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the add whitelist address function of the arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the add whitelist address - try { - let addWhitelist = await arkaPaymaster.addWhitelist([data.sender]); - - if (addWhitelist.includes(constant.add_whitelist_1)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the check whitelist function of the arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the whilelist address - try { - let checkWhitelist = await arkaPaymaster.checkWhitelist(data.sender); - - if (checkWhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkWhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Validate the deposit function of the arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // validate the deposit - try { - let deposit = await arkaPaymaster.deposit(data.value); - - if (deposit.includes(constant.deposit_1)) { - addContext(test, message.vali_deposit_1); - console.log(message.vali_deposit_1); - } else { - 
addContext(test, message.fail_deposit_1); - assert.fail(message.fail_deposit_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid arka paymaster url on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: data.invalid_paymaster_arka, // invalid URL - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - if (e.message === constant.not_found) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with invalid API Key of arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - 
api_key: process.env.INVALID_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with incorrect API Key of arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - api_key: process.env.INCORRECT_API_KEY, - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_5); - assert.fail(message.fail_estimateTransaction_5); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_4); - console.log(message.vali_estimateTransaction_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without API Key of arka paymaster on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: data.paymaster_arka, - // without api_key - context: { mode: 'sponsor' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL on the sepolia network', async function () { - var test = this; - const invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${invalid_arka_url}/pimlicoAddress${queryString}`, // invalid paymaster URL - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (paymasterAddress.includes(constant.not_found)) { - addContext(test, message.vali_pimlico_paymasterAddress_2); - console.log(message.vali_pimlico_paymasterAddress_2); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_2); - assert.fail(message.fail_pimlico_paymasterAddress_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid API Key in queryString on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.INVALID_API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - 
addContext(test, message.vali_pimlico_paymasterAddress_3); - console.log(message.vali_pimlico_paymasterAddress_3); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_3); - assert.fail(message.fail_pimlico_paymasterAddress_3); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without API Key in queryString on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?chainId=${Number(data.sepolia_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_apiKey) { - addContext(test, message.vali_pimlico_paymasterAddress_4); - console.log(message.vali_pimlico_paymasterAddress_4); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_4); - assert.fail(message.fail_pimlico_paymasterAddress_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid ChainID in queryString on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_sepolia_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_3) { - addContext(test, message.vali_pimlico_paymasterAddress_5); - console.log(message.vali_pimlico_paymasterAddress_5); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_5); - assert.fail(message.fail_pimlico_paymasterAddress_5); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without ChainID in queryString on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; 
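// Editor's note — illustrative only, not part of this patch. This test and its
// neighbours each vary one piece of the same Arka `pimlicoAddress` request that the
// spec builds elsewhere. Assuming the endpoint and fields already used above, the
// happy-path call is roughly:
//
//   const returnedValue = await fetch(
//     `${data.paymaster_arka}/pimlicoAddress?apiKey=${process.env.API_KEY}&chainId=${Number(data.sepolia_chainid)}`,
//     {
//       method: 'POST',
//       headers: { Accept: 'application/json', 'Content-Type': 'application/json' },
//       body: JSON.stringify({
//         params: [data.entryPointAddress, { token: data.usdc_token }],
//       }),
//     }
//   ).then((res) => res.json());
//
// On success `returnedValue.message` holds the paymaster address (checked with
// `utils.isAddress`); on failure `returnedValue.error` carries strings such as
// `constant.invalid_apiKey`, `constant.invalid_data`, or `constant.invalid_network_3`,
// which the negative tests assert against.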
- let queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - // without chainid in queryString - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_6); - console.log(message.vali_pimlico_paymasterAddress_6); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_6); - assert.fail(message.fail_pimlico_paymasterAddress_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Entry Point Address while fetching the paymaster address on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.invalidEntryPointAddress, // invalid entry point address - { token: data.usdc_token }, - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - const errorMessage = returnedValue.error; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_pimlico_paymasterAddress_7); - console.log(message.vali_pimlico_paymasterAddress_7); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_7); - assert.fail(message.fail_pimlico_paymasterAddress_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token while fetching the paymaster address on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: data.invalid_usdc_token }, // invalid token - ], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_network_1) { - addContext(test, message.vali_pimlico_paymasterAddress_8); - console.log(message.vali_pimlico_paymasterAddress_8); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_8); - assert.fail(message.fail_pimlico_paymasterAddress_8); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without parameters while fetching the paymaster address on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [], // without parametets - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (returnedValue.error === constant.invalid_data) { - addContext(test, message.vali_pimlico_paymasterAddress_9); - console.log(message.vali_pimlico_paymasterAddress_9); - } else { - addContext(test, message.fail_pimlico_paymasterAddress_9); - assert.fail(message.fail_pimlico_paymasterAddress_9); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect token address of the erc20 contract on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.incorrectTokenAddress_sepoliaUSDC, // incorrect token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, 
message.vali_erc20Contract_1); - console.log(message.vali_erc20Contract_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid token address of the erc20 contract on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.invalidTokenAddress_sepoliaUSDC, // invalid token address - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.contract_address_1)) { - addContext(test, message.vali_erc20Contract_2); - console.log(message.vali_erc20Contract_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster address of the erc20 contract on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.invalid_paymasterAddress, ethers.constants.MaxUint256] // invalid paymaster address - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Contract_3); - console.log(message.vali_erc20Contract_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_5); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with incorrect paymaster address of the erc20 contract on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - paymasterAddress = returnedValue.message; - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [data.incorrect_paymasterAddress, ethers.constants.MaxUint256] // incorrect paymaster address - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Contract_4); - console.log(message.vali_erc20Contract_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid value of the transactions on the sepolia network', async function () { - var test = this; - let 
arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await sepoliaTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.invalidValue), - }); - - addContext(test, message.fail_addTransaction_2); - assert.fail(message.fail_addTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_value_1)) { - addContext(test, vali_addTransaction_1); - console.log(vali_addTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_3); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid paymaster URL while estimate the transactions on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let 
encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await sepoliaTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid Api Key while estimate the transactions on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - let 
invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.sepolia_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await sepoliaTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka 
pimlico paymaster without Api Key while estimate the transactions on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - let invalid_queryString = `?chainId=${Number(data.sepolia_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await sepoliaTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_estimateTransaction_5); - console.log(message.vali_estimateTransaction_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_6); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 
times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster with invalid chainid while estimate the transactions on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_sepolia_chainid - )}`; // invalid chainid in queryString - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await sepoliaTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - 
const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer token on arka pimlico paymaster without chainid while estimate the transactions on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID - if (runTest) { - await customRetryAsync(async function () { - let returnedValue; - let paymasterAddress; - let erc20Contract; - let encodedData; - let approveOp; - - try { - returnedValue = await fetch( - `${arka_url}/pimlicoAddress${queryString}`, - { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [data.entryPointAddress, { token: data.usdc_token }], - }), - } - ).then((res) => { - return res.json(); - }); - - paymasterAddress = returnedValue.message; - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - if (utils.isAddress(paymasterAddress)) { - // get the erc20 Contract - try { - erc20Contract = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI - ); - encodedData = erc20Contract.interface.encodeFunctionData( - 'approve', - [paymasterAddress, ethers.constants.MaxUint256] - ); - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: encodedData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_1); - } - - // get the UserOp Hash - try { - approveOp = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // get the uoHash1 - try { - await sepoliaTestNetSdk.send(approveOp); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { token: data.usdc_token, mode: 'erc20' }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = 
e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - } else { - addContext(test, message.fail_erc20Contract_2); - assert.fail(message.fail_erc20Contract_2); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid paymaster URL on the sepolia network', async function () { - var test = this; - let invalid_arka_url = data.invalid_paymaster_arka; - let queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.sepolia_chainid - )}`; - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${invalid_arka_url}${queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_2); - assert.fail(message.fail_estimateTransaction_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.not_found)) { - addContext(test, message.vali_estimateTransaction_1); - console.log(message.vali_estimateTransaction_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - 
xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid API Token on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${ - process.env.INVALID_API_KEY - }&chainId=${Number(data.sepolia_chainid)}`; // invalid API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_4); - assert.fail(message.fail_estimateTransaction_4); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without API Token on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?chainId=${Number(data.sepolia_chainid)}`; // without API Key in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - 
await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_6); - assert.fail(message.fail_estimateTransaction_6); - } catch (e) { - if (e.message === constant.invalid_apiKey) { - addContext(test, message.vali_estimateTransaction_3); - console.log(message.vali_estimateTransaction_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter with invalid ChainID on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}&chainId=${Number( - data.invalid_sepolia_chainid - )}`; // invalid ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString 
= e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_7); - assert.fail(message.fail_estimateTransaction_7); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_3)) { - addContext(test, message.vali_estimateTransaction_6); - console.log(message.vali_estimateTransaction_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer token on arka paymaster with validUntil and validAfter without ChainID on the sepolia network', async function () { - var test = this; - let arka_url = data.paymaster_arka; - let invalid_queryString = `?apiKey=${process.env.API_KEY}`; // without ChainID in queryString - if (runTest) { - await customRetryAsync(async function () { - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_pimlico_paymasterAddress_1); - } - - /* estimate transactions added to the batch and get the fee data for the UserOp - validUntil and validAfter are optional defaults to 10 mins of expiry from send call and should be passed in terms of milliseconds - For example purpose, the valid is fixed as expiring in 100 mins once the paymaster data is generated - validUntil and validAfter is relevant only with sponsor transactions and not for token paymasters - */ - - // estimate transactions added to the batch and get the fee data for the UserOp - try { - await sepoliaTestNetSdk.estimate({ - paymasterDetails: { - 
url: `${arka_url}${invalid_queryString}`, - context: { - mode: 'sponsor', - validAfter: new Date().valueOf(), - validUntil: new Date().valueOf() + 6000000, - }, - }, - }); - - addContext(test, message.fail_estimateTransaction_8); - assert.fail(message.fail_estimateTransaction_8); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_estimateTransaction_7); - console.log(message.vali_estimateTransaction_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.pimlocoPaymaster_insufficientBalance); - console.warn(message.pimlocoPaymaster_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster with incorrect token on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(data.invalid_usdc_token); - - addContext(test, message.fail_getTokenPaymasterAddress_2); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_1)) { - addContext(test, message.vali_getTokenPaymasterAddress_1); - console.log(message.vali_getTokenPaymasterAddress_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the get token paymaster address function of the arka paymaster without token on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the get token paymaster address - try { - await arkaPaymaster.getTokenPaymasterAddress(); - - addContext(test, message.fail_getTokenPaymasterAddress_3); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_getTokenPaymasterAddress_2); - console.log(message.vali_getTokenPaymasterAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTokenPaymasterAddress_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with invalid address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.invalidSender]); - - addContext(test, message.fail_removeWhitelist_3); - assert.fail(message.fail_removeWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - 
addContext(test, message.vali_removeWhitelist_3); - console.log(message.vali_removeWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with incorrect address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - await arkaPaymaster.removeWhitelist([data.incorrectSender]); - - addContext(test, message.fail_removeWhitelist_4); - assert.fail(message.fail_removeWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_removeWhitelist_4); - console.log(message.vali_removeWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([randomAddress.address]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with random and whitelisted addresses on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress.address, - data.sender, - ]); - - addContext(test, message.fail_removeWhitelist_5); - assert.fail(message.fail_removeWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, 
message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple whitelisted addresses on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // make whitelisted addresses - await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - // remove whitelist addresses - let removewhitelist = await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (removewhitelist.includes(constant.remove_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_1); - console.log(message.vali_removeWhitelist_1); - } else { - addContext(test, message.fail_removeWhitelist_1); - assert.fail(message.fail_removeWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the remove whitelist address function of the arka paymaster with multiple random addresses on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the remove whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - await arkaPaymaster.removeWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - addContext(test, message.fail_removeWhitelist_6); - assert.fail(message.fail_removeWhitelist_6); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with invalid address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.invalidSender]); - - addContext(test, message.fail_addWhitelist_3); - assert.fail(message.fail_addWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_3); - console.log(message.vali_addWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_addWhitelist_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with incorrect address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - await arkaPaymaster.addWhitelist([data.incorrectSender]); - - addContext(test, message.fail_addWhitelist_4); - assert.fail(message.fail_addWhitelist_4); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_addWhitelist_4); - console.log(message.vali_addWhitelist_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_5); - console.log(message.vali_addWhitelist_5); - } else { - addContext(test, message.fail_addWhitelist_7); - assert.fail(message.fail_addWhitelist_7); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with random and whitelisted addresses on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - await arkaPaymaster.addWhitelist([ - randomAddress.address, - data.sender, - ]); - - if (addWhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); 
- } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the add whitelist address function of the arka paymaster with multiple whitelisted addresses on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the add whitelist address - try { - const randomAddress1 = ethers.Wallet.createRandom(); - const randomAddress2 = ethers.Wallet.createRandom(); - - // add whitelist addresses - let addwhitelist = await arkaPaymaster.addWhitelist([ - randomAddress1.address, - randomAddress2.address, - ]); - - if (addwhitelist.includes(constant.add_whitelist_3)) { - addContext(test, message.vali_addWhitelist_1); - console.log(message.vali_addWhitelist_1); - } else { - addContext(test, message.fail_addWhitelist_1); - assert.fail(message.fail_addWhitelist_1); - } - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.add_whitelist_2)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with invalid address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_2); - assert.fail(message.fail_checkWhitelist_2); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_1); - console.log(message.vali_checkWhitelist_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with incorrect address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(data.invalidSender); - - addContext(test, message.fail_checkWhitelist_3); - assert.fail(message.fail_checkWhitelist_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_2); - console.log(message.vali_checkWhitelist_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - 
test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster with random address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - const randomAddress = ethers.Wallet.createRandom(); - let checkwhitelist = await arkaPaymaster.checkWhitelist( - randomAddress.address - ); - - if (checkwhitelist.includes(constant.check_whitelist_1)) { - addContext(test, message.vali_addWhitelist_2); - console.log(message.vali_addWhitelist_2); - } else if (checkwhitelist.includes(constant.check_whitelist_2)) { - addContext(test, message.vali_removeWhitelist_2); - console.log(message.vali_removeWhitelist_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_4); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the check whitelist function of the arka paymaster without address on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the whilelist address - try { - await arkaPaymaster.checkWhitelist(); - - addContext(test, message.fail_checkWhitelist_5); - assert.fail(message.fail_checkWhitelist_5); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_address_5)) { - addContext(test, message.vali_checkWhitelist_3); - console.log(message.vali_checkWhitelist_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelist_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Validate the deposit function of the arka paymaster with invalid amount on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // validate the deposit - try { - await arkaPaymaster.deposit('one'); - - addContext(test, message.fail_deposit_3); - assert.fail(message.fail_deposit_3); - } catch (e) { - let errorMessage = e.message; - if (errorMessage.includes(constant.invalid_data)) { - addContext(test, message.vali_deposit_2); - console.log(message.vali_deposit_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deposit_3); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.arkaFunction_insufficientBalance); - console.warn(message.arkaFunction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/testnet/postcondition/postcondition_newWallet.spec.js b/test/specs/testnet/postcondition/postcondition_newWallet.spec.js new file mode 100644 index 0000000..a62fb20 --- /dev/null +++ b/test/specs/testnet/postcondition/postcondition_newWallet.spec.js @@ -0,0 +1,290 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { ethers } from 
'ethers'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk; +let testnetPrimeSdk_old; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Perform the postcondition for new wallet fund', function () { + it( + 'POSTCONDITION1: Initialize the modular sdk for new private key on the ' + + randomChainName + + ' network', + async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { + privateKey: sharedState.newPrivateKey, + }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + }, data.retry); // Retry this async test up to 3 times + } + ); + + it( + 'POSTCONDITION2: Perform the transfer ERC20 token from new wallet to old wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // get transferFrom encoded data + let transactionData; + balance = balance - 0.001; + const balanceStr = balance.toFixed(3); + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.sender, + ethers.utils.parseUnits(balanceStr, data.erc20_usdc_decimal), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions 
to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... + try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await testnetPrimeSdk_old.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } + ); + + it( + 'POSTCONDITION3: Perform the transfer native token from new wallet to old wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + await customRetryAsync(async function () { + helper.wait(data.longTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // add transactions to the batch + let transactionBatch; + try { + balance = balance - 0.0001; + const balanceStr = balance.toFixed(3); + + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.sender, + value: ethers.utils.parseEther(balanceStr), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... 
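+        // getUserOpReceipt returns null until the UserOp is included on-chain,
+        // so poll with a short wait until a receipt arrives or the timeout window elapses.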
+ console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 1200000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + }, data.retry); // Retry this async test up to 3 times + } + ); +}); diff --git a/test/specs/testnet/precondition/precondition_newWallet.spec.js b/test/specs/testnet/precondition/precondition_newWallet.spec.js new file mode 100644 index 0000000..fbc2e2c --- /dev/null +++ b/test/specs/testnet/precondition/precondition_newWallet.spec.js @@ -0,0 +1,421 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { randomPrivateKey } from 'etherspot'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { ethers } from 'ethers'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk_old; +let testnetPrimeSdk; +let primeAccountAddress; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Perform the precondition for new wallet generation', function () { + it( + 'PRECONDITION1: Create random private key on the ' + + randomChainName + + ' network', + async function () { + // Generate a random private key + const randomPrivateKeyString = randomPrivateKey(); + + console.log('randomPrivateKeyString', randomPrivateKeyString); + + // Store privatekey in utility + + const valueToPersist = { newPrivateKey: randomPrivateKeyString }; + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + fs.writeFileSync(filePath, JSON.stringify(valueToPersist)); + } + ); + + it( + 'PRECONDITION2: Initialize the prime sdk for new private key on the ' + + randomChainName + + ' network', + async function () { + var test = this; + await customRetryAsync(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + // wait for the execution + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get modular account address + try { + primeAccountAddress = + await testnetPrimeSdk.getCounterFactualAddress(); + + console.log('primeAccountAddress', primeAccountAddress); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + }, data.retry); // Retry this async test up to 3 times + } + ); + + it( + 'PRECONDITION3: Perform the transfer 
native token from old wallet to new wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk_old = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk_old.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + + try { + transactionBatch = await testnetPrimeSdk_old.addUserOpsToBatch({ + to: primeAccountAddress, + value: ethers.utils.parseEther(data.newWallet_value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk_old.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk_old.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... 
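+        // Wait for the funding UserOp to be mined so the new wallet holds a native balance
+        // before the remaining preconditions run against it.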
+ try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await testnetPrimeSdk_old.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + }, data.retry); // Retry this async test up to 3 times + } + ); + + it( + 'PRECONDITION4: Perform the transfer ERC20 token from old wallet to new wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + primeAccountAddress, + ethers.utils.parseUnits( + data.newWallet_erc20value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... 
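+        // Wait for the ERC20 funding UserOp to be mined before the next precondition executes.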
+ try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await testnetPrimeSdk_old.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } + ); + + it( + 'PRECONDITION5: Perform the transfer native token from new wallet to old wallet on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + await customRetryAsync(async function () { + // wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.sender, + value: ethers.utils.parseEther(balance), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash... 
+ try { + console.log('Waiting for transaction...'); + let userOpsReceipt = null; + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + helper.wait(data.mediumTimeout); + userOpsReceipt = await testnetPrimeSdk_old.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } + ); +}); diff --git a/test/specs/testnet/swap/newWallet_swap.spec.js b/test/specs/testnet/swap/newWallet_swap.spec.js new file mode 100644 index 0000000..4a55d23 --- /dev/null +++ b/test/specs/testnet/swap/newWallet_swap.spec.js @@ -0,0 +1,2380 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, constants, BigNumber } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectToTokenAddress, + randomIncorrectTokenAddress, + randomInvalidToTokenAddress, + randomInvalidTokenAddress, + randomInvalidTokenAddressUsdt, + randomToChainId, + randomToTokenAddress, + randomTokenAddress, + randomTokenAddressUsdt, +} from '../../../utils/sharedData_testnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Perform the get cross chain quotes and get advance routes LiFi transaction details on the TestNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... 
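+ // the DataUtils client created below backs the balance checks, exchange-offer and cross-chain quote lookups used throughout this spec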
+ try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromChainId = randomChainId; + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let
fromAmount = data.exchange_offer_value; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + 
console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = constants.AddressZero; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the getCrossChainQuotes response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = await dataService.getCrossChainQuotes(quoteRequestPayload); + + if (quotes.items.length > 0) { + try { + assert.isNotEmpty( + quotes.items[0].provider, + message.vali_crossChainQuotes_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.data, + message.vali_crossChainQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.to, + message.vali_crossChainQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.value, + message.vali_crossChainQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.from, + message.vali_crossChainQuotes_from + ); + } catch (e) { + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].transaction.chainId, + message.vali_crossChainQuotes_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_crossChainQuotes_1); + console.log(message.vali_crossChainQuotes_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + let stepTransaction; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + if (quotes.items.length > 0) { + const quote = quotes.items[0]; // Selected the first route + stepTransaction = await dataService.getStepTransaction({ + route: quote, + account: data.sender, + }); + + try { + assert.isNotEmpty( + quotes.items[0].id, + message.vali_advanceRoutesLiFi_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].fromChainId, + message.vali_advanceRoutesLiFi_fromChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmountUSD, + message.vali_advanceRoutesLiFi_fromAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmount, + message.vali_advanceRoutesLiFi_fromAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromToken, + message.vali_advanceRoutesLiFi_fromToken + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAddress, + message.vali_advanceRoutesLiFi_fromAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].toChainId, + message.vali_advanceRoutesLiFi_toChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountUSD, + message.vali_advanceRoutesLiFi_toAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmount, + message.vali_advanceRoutesLiFi_toAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountMin, + 
message.vali_advanceRoutesLiFi_toAmountMin + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toToken, + message.vali_advanceRoutesLiFi_toToken + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAddress, + message.vali_advanceRoutesLiFi_toAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].gasCostUSD, + message.vali_advanceRoutesLiFi_gasCostUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].value, + message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[0].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].value, + message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[1].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasLimit, + message.vali_stepTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasPrice, + message.vali_stepTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_advanceRoutesLiFi_1); + console.log(message.vali_advanceRoutesLiFi_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_advanceRoutesLiFi_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomInvalidTokenAddress; // Invalid fromTokenAddress + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_2); + assert.fail(message.fail_exchangeOffers_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_4); + console.log(message.vali_exchangeOffers_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the 
Exchange offers response without fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + // without fromTokenAddress + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_3); + assert.fail(message.fail_exchangeOffers_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_5); + console.log(message.vali_exchangeOffers_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomInvalidTokenAddressUsdt; // Invalid toTokenAddress + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_4); + assert.fail(message.fail_exchangeOffers_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_6); + console.log(message.vali_exchangeOffers_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_4); + } + } + }, 
data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + // without toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_5); + assert.fail(message.fail_exchangeOffers_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_7); + console.log(message.vali_exchangeOffers_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.invalidValue; // invalid fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_6); + assert.fail(message.fail_exchangeOffers_6); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_8); + console.log(message.vali_exchangeOffers_8); + } else { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with decimal fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_7); + assert.fail(message.fail_exchangeOffers_7); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_9); + console.log(message.vali_exchangeOffers_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + // without fromAmount + }); + + addContext(test, message.fail_exchangeOffers_9); + assert.fail(message.fail_exchangeOffers_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + 
addContext(test, message.vali_exchangeOffers_11); + console.log(message.vali_exchangeOffers_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_2); + assert.fail(message.fail_crossChainQuotes_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_crossChainQuotes_2); + console.log(message.vali_crossChainQuotes_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_3); + assert.fail(message.fail_crossChainQuotes_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_2) { + addContext(test, message.vali_crossChainQuotes_3); + console.log(message.vali_crossChainQuotes_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await 
dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_4); + assert.fail(message.fail_crossChainQuotes_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_4); + console.log(message.vali_crossChainQuotes_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_5); + assert.fail(message.fail_crossChainQuotes_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_5); + console.log(message.vali_crossChainQuotes_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_6); + assert.fail(message.fail_crossChainQuotes_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_6); + console.log(message.vali_crossChainQuotes_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async 
function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_7); + assert.fail(message.fail_crossChainQuotes_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_7); + console.log(message.vali_crossChainQuotes_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_8); + assert.fail(message.fail_crossChainQuotes_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_8); + console.log(message.vali_crossChainQuotes_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_9); + assert.fail(message.fail_crossChainQuotes_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_9); + console.log(message.vali_crossChainQuotes_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_crossChainQuotes_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.invalidSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_10); + assert.fail(message.fail_crossChainQuotes_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_10); + assert.fail(message.vali_crossChainQuotes_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.incorrectSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_11); + assert.fail(message.fail_crossChainQuotes_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_11); + assert.fail(message.vali_crossChainQuotes_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + addContext(test, message.fail_crossChainQuotes_12); + assert.fail(message.fail_crossChainQuotes_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + 
addContext(test, message.fail_crossChainQuotes_12); + assert.fail(message.fail_crossChainQuotes_12); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_12); + console.log(message.vali_crossChainQuotes_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_2); + assert.fail(fail_advanceRoutesLiFi_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_advanceRoutesLiFi_2); + console.log(message.vali_advanceRoutesLiFi_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_3); + assert.fail(fail_advanceRoutesLiFi_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_2) { + addContext(test, message.vali_advanceRoutesLifi_3); + console.log(message.vali_advanceRoutesLifi_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: 
data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_4); + assert.fail(fail_advanceRoutesLiFi_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_4); + console.log(message.vali_advanceRoutesLifi_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_5); + assert.fail(fail_advanceRoutesLiFi_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_5); + console.log(message.vali_advanceRoutesLifi_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_6); + assert.fail(fail_advanceRoutesLiFi_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_6); + console.log(message.vali_advanceRoutesLifi_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + 
test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_7); + assert.fail(fail_advanceRoutesLiFi_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_7); + console.log(message.vali_advanceRoutesLifi_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_8); + assert.fail(fail_advanceRoutesLiFi_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_8); + console.log(message.vali_advanceRoutesLifi_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_9); + assert.fail(fail_advanceRoutesLiFi_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_9); + 
console.log(message.vali_advanceRoutesLifi_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_10); + assert.fail(fail_advanceRoutesLiFi_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_10); + console.log(message.vali_advanceRoutesLifi_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/swap/oldWallet_swap.spec.js b/test/specs/testnet/swap/oldWallet_swap.spec.js new file mode 100644 index 0000000..6728935 --- /dev/null +++ b/test/specs/testnet/swap/oldWallet_swap.spec.js @@ -0,0 +1,2412 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { utils, constants, BigNumber } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectToTokenAddress, + randomIncorrectTokenAddress, + randomInvalidToTokenAddress, + randomInvalidTokenAddress, + randomInvalidTokenAddressUsdt, + randomToChainId, + randomToTokenAddress, + randomTokenAddress, + randomTokenAddressUsdt, +} from '../../../utils/sharedData_testnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let testnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Perform the get cross chain quotes and get advance routes LiFi transaction details on the TestNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + 
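+ // sanity check: the SDK initialised with PRIVATE_KEY should report the expected EOA address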
assert.strictEqual( + testnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await testnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to ERC20 and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + 
exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromChainId = randomChainId; + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the Exchange offers response with ERC20 to Native Token and valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch 
(e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + let offers; + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = constants.AddressZero; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + offers = await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + if (offers.length > 0) { + for (let i = 0; i < offers.length; i++) { + try { + assert.isNotEmpty( + offers[i].provider, + message.vali_exchangeOffers_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].receiveAmount, + message.vali_exchangeOffers_receiveAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + offers[i].exchangeRate, + message.vali_exchangeOffers_exchangeRate + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + offers[i].transactions, + message.vali_exchangeOffers_transactions + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } else { + addContext(test, message.vali_exchangeOffers_3); + console.log(message.vali_exchangeOffers_3); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Validate the getCrossChainQuotes response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = await dataService.getCrossChainQuotes(quoteRequestPayload); + + if 
(quotes.items.length > 0) { + try { + assert.isNotEmpty( + quotes.items[0].provider, + message.vali_crossChainQuotes_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.data, + message.vali_crossChainQuotes_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.to, + message.vali_crossChainQuotes_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.value, + message.vali_crossChainQuotes_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].transaction.from, + message.vali_crossChainQuotes_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].transaction.chainId, + message.vali_crossChainQuotes_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_crossChainQuotes_1); + console.log(message.vali_crossChainQuotes_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the getAdvanceRoutesLiFi response with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + let quotes; + let stepTransaction; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + quotes = + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + if (quotes.items.length > 0) { + const quote = quotes.items[0]; // Selected the first route + stepTransaction = await dataService.getStepTransaction({ + route: quote, + account: data.sender, + }); + + try { + assert.isNotEmpty( + quotes.items[0].id, + message.vali_advanceRoutesLiFi_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].fromChainId, + message.vali_advanceRoutesLiFi_fromChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmountUSD, + message.vali_advanceRoutesLiFi_fromAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAmount, + message.vali_advanceRoutesLiFi_fromAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromToken, + message.vali_advanceRoutesLiFi_fromToken + ); + } catch 
(e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].fromAddress, + message.vali_advanceRoutesLiFi_fromAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + quotes.items[0].toChainId, + message.vali_advanceRoutesLiFi_toChainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountUSD, + message.vali_advanceRoutesLiFi_toAmountUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmount, + message.vali_advanceRoutesLiFi_toAmount + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAmountMin, + message.vali_advanceRoutesLiFi_toAmountMin + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toToken, + message.vali_advanceRoutesLiFi_toToken + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].toAddress, + message.vali_advanceRoutesLiFi_toAddress + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + quotes.items[0].gasCostUSD, + message.vali_advanceRoutesLiFi_gasCostUSD + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].value, + message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[0].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[0].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].transactionType, + message.vali_stepTransaction_transactionType + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].data, + message.vali_stepTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].to, + message.vali_stepTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].value, + 
message.vali_stepTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + stepTransaction.items[1].chainId, + message.vali_stepTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasLimit, + message.vali_stepTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + stepTransaction.items[1].gasPrice, + message.vali_stepTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_advanceRoutesLiFi_1); + console.log(message.vali_advanceRoutesLiFi_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].address, + message.vali_exchangeOffers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].chainId, + message.vali_exchangeOffers_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].name, + message.vali_exchangeOffers_name + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].symbol, + message.vali_exchangeOffers_symbol + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + exchangeSupportedAssets.items[0].decimals, + message.vali_exchangeOffers_decimals + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + exchangeSupportedAssets.items[0].logoURI, + message.vali_exchangeOffers_logoURI + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomInvalidTokenAddress; // Invalid fromTokenAddress + let toTokenAddress = 
randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_2); + assert.fail(message.fail_exchangeOffers_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_4); + console.log(message.vali_exchangeOffers_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without fromTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + // without fromTokenAddress + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_3); + assert.fail(message.fail_exchangeOffers_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_exchangeOffers_5); + console.log(message.vali_exchangeOffers_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } 
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomInvalidTokenAddressUsdt; // Invalid toTokenAddress + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_4); + assert.fail(message.fail_exchangeOffers_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_6); + console.log(message.vali_exchangeOffers_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without toTokenAddress details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let fromAmount = data.exchange_offer_value; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + // without toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_5); + assert.fail(message.fail_exchangeOffers_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_exchangeOffers_7); + console.log(message.vali_exchangeOffers_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with invalid fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if 
(exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.invalidValue; // invalid fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_6); + assert.fail(message.fail_exchangeOffers_6); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_8); + console.log(message.vali_exchangeOffers_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response with decimal fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromAmount = data.exchange_offer_decimal_value; // decimal fromAmount + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + fromAmount: BigNumber.from(fromAmount), + }); + + addContext(test, message.fail_exchangeOffers_7); + assert.fail(message.fail_exchangeOffers_7); + } catch (e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_exchangeOffers_9); + console.log(message.vali_exchangeOffers_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the Exchange offers response without fromAmount on the ' + + randomChainName + + ' network', + async function () { + var test = this; + let exchangeSupportedAssets; + if (runTest) { + await customRetryAsync(async function () { + exchangeSupportedAssets = + await dataService.getExchangeSupportedAssets({ + page: 
1, + limit: 100, + account: data.sender, + chainId: Number(randomChainId), + }); + + try { + if (exchangeSupportedAssets.items.length > 0) { + addContext(test, message.vali_exchangeOffers_1); + console.log(message.vali_exchangeOffers_1); + } else { + addContext(test, message.vali_exchangeOffers_2); + console.error(message.vali_exchangeOffers_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + let fromAddress = data.sender; + let fromTokenAddress = randomTokenAddress; + let toTokenAddress = randomTokenAddressUsdt; + let fromChainId = randomChainId; + + await dataService.getExchangeOffers({ + fromAddress, + fromChainId, + fromTokenAddress, + toTokenAddress, + // without fromAmount + }); + + addContext(test, message.fail_exchangeOffers_9); + assert.fail(message.fail_exchangeOffers_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_2 + ) { + addContext(test, message.vali_exchangeOffers_11); + console.log(message.vali_exchangeOffers_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_exchangeOffers_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.exchangeOffers_insufficientBalance); + console.warn(message.exchangeOffers_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_2); + assert.fail(message.fail_crossChainQuotes_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_crossChainQuotes_2); + console.log(message.vali_crossChainQuotes_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_3); + assert.fail(message.fail_crossChainQuotes_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_2) { + 
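+ // the request omits toChainId, so the validation error is expected to point at that property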
addContext(test, message.vali_crossChainQuotes_3); + console.log(message.vali_crossChainQuotes_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_4); + assert.fail(message.fail_crossChainQuotes_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_4); + console.log(message.vali_crossChainQuotes_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_5); + assert.fail(message.fail_crossChainQuotes_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_5); + console.log(message.vali_crossChainQuotes_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + 
fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_6); + assert.fail(message.fail_crossChainQuotes_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_crossChainQuotes_6); + console.log(message.vali_crossChainQuotes_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_7); + assert.fail(message.fail_crossChainQuotes_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_7); + console.log(message.vali_crossChainQuotes_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_8); + assert.fail(message.fail_crossChainQuotes_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_8); + console.log(message.vali_crossChainQuotes_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 
'REGRESSION: Validate the getCrossChainQuotes response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAddress: data.sender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_9); + assert.fail(message.fail_crossChainQuotes_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_crossChainQuotes_9); + console.log(message.vali_crossChainQuotes_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with invalid fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.invalidSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_10); + assert.fail(message.fail_crossChainQuotes_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_10); + assert.fail(message.vali_crossChainQuotes_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response with incorrect fromAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.incorrectSender, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_11); + assert.fail(message.fail_crossChainQuotes_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_11); + 
assert.fail(message.vali_crossChainQuotes_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + addContext(test, message.fail_crossChainQuotes_11); + assert.fail(message.fail_crossChainQuotes_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Validate the getCrossChainQuotes response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAddress: data.sender, + }; + + await dataService.getCrossChainQuotes(quoteRequestPayload); + + addContext(test, message.fail_crossChainQuotes_12); + assert.fail(message.fail_crossChainQuotes_12); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_crossChainQuotes_12); + console.log(message.vali_crossChainQuotes_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_crossChainQuotes_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.crossChainQuotes_insufficientBalance); + console.warn(message.crossChainQuotes_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_2); + assert.fail(message.fail_advanceRoutesLiFi_2); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_1) { + addContext(test, message.vali_advanceRoutesLiFi_2); + console.log(message.vali_advanceRoutesLiFi_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toChainId detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); +
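+ // the call above is expected to reject because toChainId is omitted; reaching the next assert means validation did not fire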
+ addContext(test, message.fail_advanceRoutesLiFi_3); + assert.fail(message.fail_advanceRoutesLiFi_3); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if (errorResponse[0].property === constant.invalid_chainid_2) { + addContext(test, message.vali_advanceRoutesLifi_3); + console.log(message.vali_advanceRoutesLifi_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomInvalidTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_4); + assert.fail(message.fail_advanceRoutesLiFi_4); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_4); + console.log(message.vali_advanceRoutesLifi_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomIncorrectTokenAddress, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_5); + assert.fail(message.fail_advanceRoutesLiFi_5); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_5); + console.log(message.vali_advanceRoutesLifi_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await
customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + toTokenAddress: randomToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_6); + assert.fail(message.fail_advanceRoutesLiFi_6); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_2 + ) { + addContext(test, message.vali_advanceRoutesLifi_6); + console.log(message.vali_advanceRoutesLifi_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with invalid toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomInvalidToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_7); + assert.fail(message.fail_advanceRoutesLiFi_7); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_7); + console.log(message.vali_advanceRoutesLifi_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response with incorrect toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomIncorrectToTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, message.fail_advanceRoutesLiFi_8); + assert.fail(message.fail_advanceRoutesLiFi_8); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_8); + console.log(message.vali_advanceRoutesLifi_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_advanceRoutesLiFi_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test,
message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without toTokenAddress detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + fromAmount: utils.parseUnits(data.swap_value, 6), + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_9); + assert.fail(fail_advanceRoutesLiFi_9); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_9); + console.log(message.vali_advanceRoutesLifi_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the getAdvanceRoutesLiFi response without fromAmount detail on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + let quoteRequestPayload; + try { + quoteRequestPayload = { + fromAddress: data.sender, + fromChainId: randomChainId, + toChainId: randomToChainId, + fromTokenAddress: randomTokenAddress, + toTokenAddress: randomToTokenAddress, + }; + + await dataService.getAdvanceRoutesLiFi(quoteRequestPayload); + + addContext(test, fail_advanceRoutesLiFi_10); + assert.fail(fail_advanceRoutesLiFi_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.IsBigNumberish === + constant.invalid_bignumber_3 + ) { + addContext(test, message.vali_advanceRoutesLifi_10); + console.log(message.vali_advanceRoutesLifi_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(fail_advanceRoutesLiFi_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.advanceRoutesLiFi_insufficientBalance); + console.warn(message.advanceRoutesLiFi_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/transactionHistory/newWallet_transactionHistory.spec.js b/test/specs/testnet/transactionHistory/newWallet_transactionHistory.spec.js new file mode 100644 index 0000000..521d6e1 --- /dev/null +++ b/test/specs/testnet/transactionHistory/newWallet_transactionHistory.spec.js @@ -0,0 +1,1766 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers, utils, providers } from 'ethers'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import helper from '../../../utils/helper.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; 
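Reviewer sketch (not part of the patch): the invalid-payload cases above all follow the same negative-test shape: build a getAdvanceRoutesLiFi payload with one field missing or malformed, expect the call to throw, and match a single class-validator constraint inside the JSON-encoded error message. A hypothetical helper such as expectConstraint below (not in this patch) could collapse the repeated parse-and-compare blocks:

    // minimal sketch, assuming e.message is a JSON array of constraint violations as in the specs above
    const expectConstraint = (e, key, expected) => {
      const violations = JSON.parse(e.message);
      return violations[0] && violations[0].constraints && violations[0].constraints[key] === expected;
    };

    // usage mirroring the "invalid toTokenAddress" case
    try {
      await dataService.getAdvanceRoutesLiFi(quoteRequestPayload);
      assert.fail(message.fail_advanceRoutesLiFi_7);
    } catch (e) {
      if (!expectConstraint(e, 'isAddress', constant.invalid_address_3)) {
        assert.fail(message.fail_advanceRoutesLiFi_7);
      }
    }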
+import customRetryAsync from '../../../utils/baseTest.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Validate the single transaction and multiple transaction details on the TestNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Validate the transaction history of the native token transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); 
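Reviewer sketch (not part of the patch): the beforeEach hook above gates every test behind a live balance check: it reads the sender's balances from the data service, formats the native (18 decimals) and USDC (6 decimals) amounts, and only sets runTest when both exceed the configured minimums. Extracted as a standalone function it would look roughly like this; hasSufficientBalance is a hypothetical name, not part of this patch:

    // minimal sketch of the balance gate used in beforeEach, assuming the same testData.json values
    async function hasSufficientBalance(dataService, account, chainId) {
      const output = await dataService.getAccountBalances({ account, chainId });
      let nativeFinal = 0;
      let usdcFinal = 0;
      for (const item of output.items) {
        if (item.token === null) {
          nativeFinal = Number(utils.formatUnits(item.balance, 18)); // native balance
        } else if (item.token === randomTokenAddress) {
          usdcFinal = Number(utils.formatUnits(item.balance, 6)); // USDC balance
        }
      }
      return (
        nativeFinal > data.minimum_native_balance &&
        usdcFinal > data.minimum_token_balance
      );
    }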
+ } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // get single transaction history details + let transactionHash; + let singleTransaction; + + if (!(userOpsReceipt === null)) { + try { + transactionHash = userOpsReceipt.receipt.transactionHash; + singleTransaction = await dataService.getTransaction({ + hash: transactionHash, + chainId: Number(randomChainId), + }); + + try { + assert.isNumber( + singleTransaction.chainId, + message.vali_getTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.hash, + message.vali_getTransaction_hash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.nonce, + message.vali_getTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockHash, + message.vali_getTransaction_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.blockNumber, + message.vali_getTransaction_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.from, + message.vali_getTransaction_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.to, + message.vali_getTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.value, + message.vali_getTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.gasPrice, + message.vali_getTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasLimit, + 
message.vali_getTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.input, + message.vali_getTransaction_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.status, + message.vali_getTransaction_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockExplorerUrl, + message.vali_getTransaction_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.transactionIndex, + message.vali_getTransaction_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasUsed, + message.vali_getTransaction_gasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].transactionIndex, + message.vali_getTransaction_log_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].blockNumber, + message.vali_getTransaction_log_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].transactionHash, + message.vali_getTransaction_log_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].address, + message.vali_getTransaction_log_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].topics, + message.vali_getTransaction_log_topics + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].data, + message.vali_getTransaction_log_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].logIndex, + message.vali_getTransaction_log_logIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].blockHash, + message.vali_getTransaction_log_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransaction_1); + } + } else { + addContext(test, message.vali_getTransaction_1); + console.log(message.vali_getTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response with random transaction on the ' + + randomChainName + + ' 
network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // Fetching historical transactions + let transactions; + let randomTransaction; + + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + }); + + randomTransaction = + Math.floor( + Math.random() * (transactions.transactions.length - 1) + ) + 1; + + try { + assert.isNumber( + transactions.transactions[randomTransaction].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + 
assert.isNotEmpty( + transactions.transactions[randomTransaction].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction] + .verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction] + .maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the native transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + 
addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for the 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].blockNumber, + message.vali_getTransactions_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].target, + message.vali_getTransactions_target + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + 
transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + 'The preVerificationGas value is empty in the get transactions response.' + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].from, + message.vali_getTransactions_nativeTransfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].to, + message.vali_getTransactions_nativeTransfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].value, + message.vali_getTransactions_nativeTransfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].asset, + message.vali_getTransactions_nativeTransfers_asset + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].address, + message.vali_getTransactions_nativeTransfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + 
transactions.transactions[0].nativeTransfers[0].decimal, + message.vali_getTransactions_nativeTransfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].data, + message.vali_getTransactions_nativeTransfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the erc20 transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_2); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_3); + } + + // get transfer From encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_5); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let 
userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for the 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].from, + message.vali_getTransactions_erc20Transfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].to, + message.vali_getTransactions_erc20Transfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].value, + message.vali_getTransactions_erc20Transfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].address, + message.vali_getTransactions_erc20Transfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].erc20Transfers[0].decimal, + message.vali_getTransactions_erc20Transfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].data, + message.vali_getTransactions_erc20Transfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + 
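Reviewer sketch (not part of the patch): the ERC-20 history test above differs from the native one only in how the batched call is built; the transfer is ABI-encoded against the token contract and submitted as calldata with the token address as the target. The core of that flow, reduced to its essentials and using the same identifiers as the spec, is:

    // minimal sketch of the ERC-20 transfer submitted before querying getTransactions
    const provider = new ethers.providers.JsonRpcProvider(randomProviderNetwork);
    const erc20Instance = new ethers.Contract(randomTokenAddress, ERC20_ABI, provider);
    const transactionData = erc20Instance.interface.encodeFunctionData('transfer', [
      data.recipient,
      ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal),
    ]);

    await testnetPrimeSdk.clearUserOpsFromBatch();
    await testnetPrimeSdk.addUserOpsToBatch({ to: randomTokenAddress, data: transactionData });
    const op = await testnetPrimeSdk.estimate();
    const uoHash = await testnetPrimeSdk.send(op);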
console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction history response with invalid hash on ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + let transaction; + try { + transaction = await dataService.getTransaction({ + hash: data.incorrect_hash, // Incorrect Transaction Hash + chainId: Number(randomChainId), + }); + + if (transaction === null || Object.keys(transaction).length === 0) { + addContext(test, message.vali_getTransactions_2); + console.log(message.vali_getTransactions_2); + } else { + addContext(test, message.fail_getTransactions_6); + assert.fail(message.fail_getTransactions_6); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_6); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + try { + await dataService.getTransaction({ + hash: data.invalid_hash, // Invalid Transaction Hash + chainId: Number(randomChainId), + }); + + addContext(test, message.fail_getTransactions_7); + assert.fail(message.fail_getTransactions_7); + } catch (e) { + if (e.errors[0].constraints.isHex === constant.hash_32) { + addContext(test, message.vali_getTransactions_3); + console.log(message.vali_getTransactions_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with invalid account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let a = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_getTransactions_10); + assert.fail(message.fail_getTransactions_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_6); + console.log(message.vali_getTransactions_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with incorrect account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await 
customRetryAsync(async function () { + try { + await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_getTransactions_11); + assert.fail(message.fail_getTransactions_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_7); + console.log(message.vali_getTransactions_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/transactionHistory/oldWallet_transactionHistory.spec.js b/test/specs/testnet/transactionHistory/oldWallet_transactionHistory.spec.js new file mode 100644 index 0000000..a6c2590 --- /dev/null +++ b/test/specs/testnet/transactionHistory/oldWallet_transactionHistory.spec.js @@ -0,0 +1,1798 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers, utils, providers } from 'ethers'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import helper from '../../../utils/helper.js'; +import { + randomChainId, + randomChainName, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let testnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Validate the single transaction and multiple transaction details on the TestNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + testnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await testnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_smart_address); + } + + // initializating Data 
service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Validate the transaction history of the native token transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + 
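Reviewer sketch (not part of the patch): every submission in these specs waits for inclusion with the same inline loop, polling getUserOpReceipt every 5 seconds until a receipt arrives or a one-minute deadline passes, and treating a null receipt as "not yet mined" rather than a failure. A hypothetical helper capturing that pattern (waitForUserOpReceipt is not in this patch):

    // minimal sketch of the polling loop used after every send() in these specs
    async function waitForUserOpReceipt(sdk, uoHash, timeoutMs = 60000, pollMs = 5000) {
      const deadline = Date.now() + timeoutMs;
      let receipt = null;
      while (receipt == null && Date.now() < deadline) {
        await helper.wait(pollMs);
        receipt = await sdk.getUserOpReceipt(uoHash);
      }
      return receipt; // null when the UserOp was not mined before the deadline
    }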
addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // get single transaction history details + let transactionHash; + let singleTransaction; + + if (!(userOpsReceipt === null)) { + try { + transactionHash = userOpsReceipt.receipt.transactionHash; + singleTransaction = await dataService.getTransaction({ + hash: transactionHash, + chainId: Number(randomChainId), + }); + + try { + assert.isNumber( + singleTransaction.chainId, + message.vali_getTransaction_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.hash, + message.vali_getTransaction_hash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.nonce, + message.vali_getTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockHash, + message.vali_getTransaction_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.blockNumber, + message.vali_getTransaction_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.from, + message.vali_getTransaction_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.to, + message.vali_getTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.value, + message.vali_getTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.gasPrice, + message.vali_getTransaction_gasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasLimit, + message.vali_getTransaction_gasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.input, + message.vali_getTransaction_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.status, + message.vali_getTransaction_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.blockExplorerUrl, + message.vali_getTransaction_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.transactionIndex, + message.vali_getTransaction_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.gasUsed, + message.vali_getTransaction_gasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].transactionIndex, + 
message.vali_getTransaction_log_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].blockNumber, + message.vali_getTransaction_log_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].transactionHash, + message.vali_getTransaction_log_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].address, + message.vali_getTransaction_log_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].topics, + message.vali_getTransaction_log_topics + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].data, + message.vali_getTransaction_log_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + singleTransaction.logs[0].logIndex, + message.vali_getTransaction_log_logIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + singleTransaction.logs[0].blockHash, + message.vali_getTransaction_log_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransaction_1); + } + } else { + addContext(test, message.vali_getTransaction_1); + console.log(message.vali_getTransaction_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response with random transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // Fetching historical transactions + let transactions; + let randomTransaction; + + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + }); + + randomTransaction = + Math.floor( + Math.random() * (transactions.transactions.length - 1) + ) + 1; + + try { + assert.isNumber( + transactions.transactions[randomTransaction].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].userOpHash, + 
message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[randomTransaction].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction] + .verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[randomTransaction].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + 
transactions.transactions[randomTransaction] + .maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the native transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for the 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].blockNumber, + message.vali_getTransactions_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].target, + message.vali_getTransactions_target + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].from, + message.vali_getTransactions_nativeTransfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].to, + message.vali_getTransactions_nativeTransfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].value, + message.vali_getTransactions_nativeTransfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].asset, + message.vali_getTransactions_nativeTransfers_asset + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].address, + message.vali_getTransactions_nativeTransfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nativeTransfers[0].decimal, + message.vali_getTransactions_nativeTransfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].nativeTransfers[0].data, + message.vali_getTransactions_nativeTransfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Validate the get transactions history response of the erc20 transaction on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new
ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_2); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_3); + } + + // get transfer From encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_5); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and send it to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + + // wait for 10 seconds + helper.wait(10000); + + // Fetching historical transactions + let transactions; + try { + transactions = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.sender, + page: 1, + limit: 10, + }); + + if (userOpsReceipt != null) { + try { + assert.isNumber( + transactions.transactions[0].chainId, + message.vali_getTransactions_chainId + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].sender, + message.vali_getTransactions_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].transactionHash, + message.vali_getTransactions_transactionHash + ); + } catch (e) {
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].userOpHash, + message.vali_getTransactions_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasCost, + message.vali_getTransactions_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].actualGasUsed, + message.vali_getTransactions_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].success, + message.vali_getTransactions_success + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].timestamp, + message.vali_getTransactions_timestamp + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].paymaster, + message.vali_getTransactions_paymaster + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].value, + message.vali_getTransactions_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].blockExplorerUrl, + message.vali_getTransactions_blockExplorerUrl + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].input, + message.vali_getTransactions_input + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].nonce, + message.vali_getTransactions_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].initCode, + message.vali_getTransactions_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].callData, + message.vali_getTransactions_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].verificationGasLimit, + message.vali_getTransactions_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].preVerificationGas, + message.vali_getTransactions_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxFeePerGas, + message.vali_getTransactions_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].maxPriorityFeePerGas, + message.vali_getTransactions_maxPriorityFeePerGas 
+ ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].from, + message.vali_getTransactions_erc20Transfers_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].to, + message.vali_getTransactions_erc20Transfers_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].value, + message.vali_getTransactions_erc20Transfers_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].address, + message.vali_getTransactions_erc20Transfers_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + transactions.transactions[0].erc20Transfers[0].decimal, + message.vali_getTransactions_erc20Transfers_decimal + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactions.transactions[0].erc20Transfers[0].data, + message.vali_getTransactions_erc20Transfers_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } else { + addContext(test, message.vali_getTransactions_1); + console.log(message.vali_getTransactions_1); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction history response with invalid hash on ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + let transaction; + try { + transaction = await dataService.getTransaction({ + hash: data.incorrect_hash, // Incorrect Transaction Hash + chainId: Number(randomChainId), + }); + + if (transaction === null || Object.keys(transaction).length === 0) { + addContext(test, message.vali_getTransactions_2); + console.log(message.vali_getTransactions_2); + } else { + addContext(test, message.fail_getTransactions_6); + assert.fail(message.fail_getTransactions_6); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_6); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transaction history response when the hash hex is not of size 32 on ' + + randomChainName + + ' network', + async function
() { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // Fetching a single transaction + try { + await dataService.getTransaction({ + hash: data.invalid_hash, // Invalid Transaction Hash + chainId: Number(randomChainId), + }); + + addContext(test, message.fail_getTransactions_7); + assert.fail(message.fail_getTransactions_7); + } catch (e) { + if (e.errors[0].constraints.isHex === constant.hash_32) { + addContext(test, message.vali_getTransactions_3); + console.log(message.vali_getTransactions_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with invalid account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + let a = await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.invalidSender, + }); + + addContext(test, message.fail_getTransactions_10); + assert.fail(message.fail_getTransactions_10); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_6); + console.log(message.vali_getTransactions_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Validate the get transactions history response with incorrect account on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + try { + await dataService.getTransactions({ + chainId: Number(randomChainId), + account: data.incorrectSender, + }); + + addContext(test, message.fail_getTransactions_11); + assert.fail(message.fail_getTransactions_11); + } catch (e) { + const errorResponse = JSON.parse(e.message); + if ( + errorResponse[0].constraints.isAddress === + constant.invalid_address_1 + ) { + addContext(test, message.vali_getTransactions_7); + console.log(message.vali_getTransactions_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactions_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.getTransaction_insufficientBalance); + console.warn(message.getTransaction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/transactionHistory/sepolia.spec.js b/test/specs/testnet/transactionHistory/sepolia.spec.js deleted file mode 100644 index b782bb2..0000000 --- a/test/specs/testnet/transactionHistory/sepolia.spec.js +++ /dev/null @@ -1,1793 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk, DataUtils } from '@etherspot/prime-sdk'; -import { ethers, utils, providers } from 'ethers'; -import { ERC20_ABI } from 
'@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import helper from '../../../utils/helper.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; -import constant from '../../../data/constant.json' assert { type: 'json' }; -import message from '../../../data/messages.json' assert { type: 'json' }; - -let sepoliaTestNetSdk; -let sepoliaEtherspotWalletAddress; -let sepoliaNativeAddress = null; -let sepoliaDataService; -let runTest; - -describe('The PrimeSDK, when get the single transaction and multiple transaction details with sepolia network on the TestNet', function () { - before(async function () { - var test = this; - - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // initializating sdk - try { - sepoliaTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.sepolia_chainid), - } - ); - - try { - assert.strictEqual( - sepoliaTestNetSdk.state.EOAAddress, - data.eoaAddress, - message.vali_eoa_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_sdk_initialize); - } - - // get EtherspotWallet address - try { - sepoliaEtherspotWalletAddress = - await sepoliaTestNetSdk.getCounterFactualAddress(); - - try { - assert.strictEqual( - sepoliaEtherspotWalletAddress, - data.sender, - message.vali_smart_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e.message); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_smart_address); - } - - // initializating Data service... 
- try { - sepoliaDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await sepoliaDataService.getAccountBalances({ - account: data.sender, - chainId: Number(data.sepolia_chainid), - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === sepoliaNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_sepoliaUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Validate the transaction history of the native token transaction on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await sepoliaTestNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // get single transaction history details - let transactionHash; - let singleTransaction; - - if (!(userOpsReceipt === null)) { - try { - transactionHash = userOpsReceipt.receipt.transactionHash; - singleTransaction = await 
sepoliaDataService.getTransaction({ - hash: transactionHash, - chainId: Number(data.sepolia_chainid), - }); - - try { - assert.isNumber( - singleTransaction.chainId, - message.vali_getTransaction_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.hash, - message.vali_getTransaction_hash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.nonce, - message.vali_getTransaction_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockHash, - message.vali_getTransaction_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.blockNumber, - message.vali_getTransaction_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.from, - message.vali_getTransaction_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.to, - message.vali_getTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.value, - message.vali_getTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.gasPrice, - message.vali_getTransaction_gasPrice - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasLimit, - message.vali_getTransaction_gasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.input, - message.vali_getTransaction_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.status, - message.vali_getTransaction_status - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.blockExplorerUrl, - message.vali_getTransaction_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.transactionIndex, - message.vali_getTransaction_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.gasUsed, - message.vali_getTransaction_gasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].transactionIndex, - message.vali_getTransaction_log_transactionIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].blockNumber, - message.vali_getTransaction_log_blockNumber - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].transactionHash, - message.vali_getTransaction_log_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].address, - message.vali_getTransaction_log_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].topics, - message.vali_getTransaction_log_topics - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].data, - message.vali_getTransaction_log_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - singleTransaction.logs[0].logIndex, - message.vali_getTransaction_log_logIndex - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - singleTransaction.logs[0].blockHash, - message.vali_getTransaction_log_blockHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransaction_1); - } - } else { - addContext(test, message.vali_getTransaction_1); - console.log(message.vali_getTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response with random transaction in sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // Fetching historical transactions - let transactions; - let randomTransaction; - - try { - transactions = await sepoliaDataService.getTransactions({ - chainId: Number(data.sepolia_chainid), - account: data.sender, - }); - - randomTransaction = - Math.floor(Math.random() * (transactions.transactions.length - 1)) + - 1; - - try { - assert.isNumber( - transactions.transactions[randomTransaction].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[randomTransaction].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[randomTransaction].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the native transaction in sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await sepoliaTestNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await sepoliaDataService.getTransactions({ - chainId: Number(data.sepolia_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - 
transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - 'The preVerificationGas value is empty in the get transactions response.' 
- ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].from, - message.vali_getTransactions_nativeTransfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].to, - message.vali_getTransactions_nativeTransfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].value, - message.vali_getTransactions_nativeTransfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].asset, - message.vali_getTransactions_nativeTransfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].address, - message.vali_getTransactions_nativeTransfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nativeTransfers[0].decimal, - message.vali_getTransactions_nativeTransfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].nativeTransfers[0].data, - message.vali_getTransactions_nativeTransfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Validate the get transactions history response of the erc20 transaction in sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - 
assert.fail(message.fail_getTransactions_2); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_3); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_4); - } - - // get transfer From encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_5); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - // get transaction hash - let userOpsReceipt = null; - try { - console.log('Waiting for transaction...'); - const timeout = Date.now() + 60000; // 1 minute timeout - while (userOpsReceipt == null && Date.now() < timeout) { - await helper.wait(5000); - userOpsReceipt = await sepoliaTestNetSdk.getUserOpReceipt(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactionHash_1); - } - - // wait for the 10 seconds - helper.wait(10000); - - // Fetching historical transactions - let transactions; - try { - transactions = await sepoliaDataService.getTransactions({ - chainId: Number(data.sepolia_chainid), - account: data.sender, - page: 1, - limit: 10, - }); - - if (userOpsReceipt != null) { - try { - assert.isNumber( - transactions.transactions[0].chainId, - message.vali_getTransactions_chainId - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].blockNumber, - message.vali_getTransactions_blockNumber - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].sender, - message.vali_getTransactions_sender - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].target, - message.vali_getTransactions_target - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].transactionHash, - message.vali_getTransactions_transactionHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].userOpHash, - message.vali_getTransactions_userOpHash - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasCost, - message.vali_getTransactions_actualGasCost - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].actualGasUsed, - message.vali_getTransactions_actualGasUsed - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].success, - message.vali_getTransactions_success - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].timestamp, - message.vali_getTransactions_timestamp - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].paymaster, - message.vali_getTransactions_paymaster - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].value, - message.vali_getTransactions_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].blockExplorerUrl, - message.vali_getTransactions_blockExplorerUrl - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].input, - message.vali_getTransactions_input - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].nonce, - message.vali_getTransactions_nonce - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].initCode, - message.vali_getTransactions_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].callData, - message.vali_getTransactions_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].verificationGasLimit, - message.vali_getTransactions_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].preVerificationGas, - message.vali_getTransactions_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString 
= e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxFeePerGas, - message.vali_getTransactions_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].maxPriorityFeePerGas, - message.vali_getTransactions_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].from, - message.vali_getTransactions_erc20Transfers_from - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].to, - message.vali_getTransactions_erc20Transfers_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].value, - message.vali_getTransactions_erc20Transfers_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].asset, - message.vali_getTransactions_erc20Transfers_asset - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].address, - message.vali_getTransactions_erc20Transfers_address - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNumber( - transactions.transactions[0].erc20Transfers[0].decimal, - message.vali_getTransactions_erc20Transfers_decimal - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactions.transactions[0].erc20Transfers[0].data, - message.vali_getTransactions_erc20Transfers_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } else { - addContext(test, message.vali_getTransactions_1); - console.log(message.vali_getTransactions_1); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response with invalid hash on sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - let transaction; - try { - transaction = await sepoliaDataService.getTransaction({ - hash: data.incorrect_hash, // Incorrect Transaction Hash - chainId: Number(data.sepolia_chainid), - }); - - if (transaction === null || Object.keys(transaction).length === 0) { - addContext(test, message.vali_getTransactions_2); - console.log(message.vali_getTransactions_2); - 
} else { - addContext(test, message.fail_getTransactions_6); - assert.fail(message.fail_getTransactions_6); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_6); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transaction history response when hash hex is not with 32 size on sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // Fetching a single transaction - try { - await sepoliaDataService.getTransaction({ - hash: data.invalid_hash, // Invalid Transaction Hash - chainId: Number(data.sepolia_chainid), - }); - - addContext(test, message.fail_getTransactions_7); - assert.fail(message.fail_getTransactions_7); - } catch (e) { - if (e.errors[0].constraints.isHex === constant.hash_32) { - addContext(test, message.vali_getTransactions_3); - console.log(message.vali_getTransactions_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid chainid in sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let transactions = await sepoliaDataService.getTransactions({ - chainId: Number(data.invalid_sepolia_chainid), - account: data.sender, - }); - - if (transactions.transactions.length === 0) { - addContext(test, message.vali_getTransactions_4); - console.log(message.vali_getTransactions_4); - } else { - addContext(test, message.fail_getTransactions_8); - assert.fail(message.fail_getTransactions_8); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with invalid account in sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - let a = await sepoliaDataService.getTransactions({ - chainId: Number(data.sepolia_chainid), - account: data.invalidSender, - }); - - addContext(test, message.fail_getTransactions_10); - assert.fail(message.fail_getTransactions_10); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_6); - console.log(message.vali_getTransactions_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - 
console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Validate the get transactions history response with incorrect account in sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - try { - await sepoliaDataService.getTransactions({ - chainId: Number(data.sepolia_chainid), - account: data.incorrectSender, - }); - - addContext(test, message.fail_getTransactions_11); - assert.fail(message.fail_getTransactions_11); - } catch (e) { - const errorResponse = JSON.parse(e.message); - if ( - errorResponse[0].constraints.isAddress === - constant.invalid_address_1 - ) { - addContext(test, message.vali_getTransactions_7); - console.log(message.vali_getTransactions_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getTransactions_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.getTransaction_insufficientBalance); - console.warn(message.getTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/specs/testnet/transferringFunds/newWallet_transferringFunds.spec.js b/test/specs/testnet/transferringFunds/newWallet_transferringFunds.spec.js new file mode 100644 index 0000000..2ac0ef1 --- /dev/null +++ b/test/specs/testnet/transferringFunds/newWallet_transferringFunds.spec.js @@ -0,0 +1,3856 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers, utils, providers } from 'ethers'; +import { assert } from 'chai'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidProviderNetwork, + randomInvalidTokenAddress, + randomOtherProviderNetwork, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import abi from '../../../data/nftabi.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; +import { dirname } from 'path'; +import { fileURLToPath } from 'url'; +import fs from 'fs'; +import path from 'path'; + +let testnetPrimeSdk; +let nativeAddress = null; +let dataService; +let runTest; +const __dirname = dirname(fileURLToPath(import.meta.url)); + +describe('Perform the transaction of the tokens on the TestNet (with new wallet)', function () { + before(async function () { + const filePath = path.join(__dirname, '../../../utils/testUtils.json'); + const sharedState = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializing sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: sharedState.newPrivateKey }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + //
initializing Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }); + + it( + 'SMOKE: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce,
message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC20 token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + + try { + assert.isTrue( + provider._isProvider, + message.vali_erc20Transfer_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); 
+ addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + + try { + assert.isNotEmpty( + transactionData, + message.vali_erc20Contract_transferFrom + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC721 NFT token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get erc721 Contract Interface + let erc721Interface; + let erc721Data; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + + try { + assert.isNotEmpty( + erc721Data, + message.vali_erc721Transfer_contractInterface + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.nft_tokenAddress, + data: erc721Data, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to[0], + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.data[0], + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + let op; + try { + op = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + 
test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + // passing callGasLimit as 40000 to manually set it + let op; + try { + op = await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } 
catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the concurrent userops with valid details on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); 
+ const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from unstaked entities (wallets, factories, paymaster) + // Staked entities can send as many userops as they want + let concurrentUseropsCount = 1; + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await testnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await testnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await
testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_9); + console.log(message.vali_estimateTransaction_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseUnits(data.invalidValue), // invalid value + }); + + addContext(test, message.fail_estimateTransaction_11); + 
assert.fail(message.fail_estimateTransaction_11); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_estimateTransaction_10); + console.log(message.vali_estimateTransaction_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseUnits(data.smallValue), // very small value + }); + + addContext(test, message.fail_estimateTransaction_12); + assert.fail(message.fail_estimateTransaction_12); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_estimateTransaction_11); + console.log(message.vali_estimateTransaction_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To 
Address while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. 
missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.empty_batch) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid provider network
details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomInvalidProviderNetwork // invalid provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without provider network details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider(); // without provider + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with other provider network details while Getting the Decimal from ERC20 Contract on the ' + +
randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomOtherProviderNetwork // other provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. 
missing character details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + try { + new ethers.Contract(null, ERC20_ABI, provider); // null token address + + addContext(test, message.fail_erc20Transfer_3); + assert.fail(message.fail_erc20Transfer_3); + } catch (e) { + if (e.reason === constant.contract_address_2) { + addContext(test, message.vali_erc20Transfer_3); + console.log(message.vali_erc20Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
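+          // The misspelled-fragment cases in this block ('transferr') invert the valid
+          // encoding used elsewhere in this suite. A minimal sketch of the valid form,
+          // assuming the same erc20Instance built from ERC20_ABI and the same fixtures:
+          //
+          //   const calldata = erc20Instance.interface.encodeFunctionData('transfer', [
+          //     data.recipient,
+          //     ethers.utils.parseUnits(data.erc20_value, data.erc20_usdc_decimal),
+          //   ]);
+          //
+          // With 'transferr' there is no matching fragment in the ABI, so ethers v5 throws
+          // instead of returning calldata, which is the error branch these tests assert on.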
assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.invalidValue, + data.erc20_usdc_decimal + ), // invalid value + ]); + + addContext(test, message.fail_erc20Transfer_5); + assert.fail(message.fail_erc20Transfer_5); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_erc20Transfer_5); + console.log(message.vali_erc20Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + 
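+          // The 'invalid value' and 'very small value' cases hinge on parseUnits validation
+          // in ethers v5: a non-numeric string is rejected outright, and a value with more
+          // fractional digits than the token's decimals cannot be represented. A hedged
+          // sketch, assuming data.invalidValue is non-numeric and data.smallValue carries
+          // more fractional digits than the 6 USDC decimals:
+          //
+          //   ethers.utils.parseUnits('1.0', data.erc20_usdc_decimal); // ok: returns a BigNumber
+          //   ethers.utils.parseUnits(data.invalidValue, 6);           // throws: not a decimal string
+          //   ethers.utils.parseUnits(data.smallValue, 6);             // throws: too many fractional digits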
try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits(data.smallValue, data.erc20_usdc_decimal), // very small value + ]); + + addContext(test, message.fail_erc20Transfer_6); + assert.fail(message.fail_erc20Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_erc20Transfer_6); + console.log(message.vali_erc20Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ]); + + addContext(test, message.fail_erc20Transfer_7); + assert.fail(message.fail_erc20Transfer_7); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_7); + console.log(message.vali_erc20Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.incorrectRecipient, // incorrect recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_8); + assert.fail(message.fail_erc20Transfer_8); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Transfer_8); + console.log(message.vali_erc20Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. missing character while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.invalidRecipient, // invalid recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_9); + assert.fail(message.fail_erc20Transfer_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Transfer_9); + console.log(message.vali_erc20Transfer_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
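+          // The recipient-address cases above rely on the address check ethers v5 performs
+          // while ABI-encoding: both a corrupted address (data.incorrectRecipient) and a
+          // too-short one (data.invalidRecipient) are rejected before any calldata is built.
+          // A hedged sketch of the same check in isolation:
+          //
+          //   ethers.utils.getAddress(data.recipient);          // returns the checksummed address
+          //   ethers.utils.getAddress(data.invalidRecipient);   // throws an invalid-address error
+          //
+          // which is why these tests assert on e.reason containing the invalid-address constants.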
addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_10); + assert.fail(message.fail_erc20Transfer_10); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_10); + console.log(message.vali_erc20Transfer_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomIncorrectTokenAddress, // Incorrect Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_16); + assert.fail(message.fail_estimateTransaction_16); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_15); + console.log(message.vali_estimateTransaction_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_16); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } 
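+    // The batching cases in this group (incorrect, invalid, null and missing `to`) all reuse
+    // the same happy-path flow from this suite, varying only the destination address. A
+    // minimal sketch of that flow, assuming the testnetPrimeSdk instance and the
+    // transactionData encoded above:
+    //
+    //   await testnetPrimeSdk.clearUserOpsFromBatch();
+    //   await testnetPrimeSdk.addUserOpsToBatch({ to: randomTokenAddress, data: transactionData });
+    //   const op = await testnetPrimeSdk.estimate();   // fails here when `to` is bad
+    //   const uoHash = await testnetPrimeSdk.send(op);
+    //
+    // addUserOpsToBatch() accepts the bad `to` silently, so the failure is expected to
+    // surface from estimate() in each of these tests.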
+ ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. missing character while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomInvalidTokenAddress, // Invalid Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_17); + assert.fail(message.fail_estimateTransaction_17); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_16); + console.log(message.vali_estimateTransaction_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); 
+ } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: null, // Null Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_18); + assert.fail(message.fail_estimateTransaction_18); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_17); + console.log(message.vali_estimateTransaction_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + data: transactionData, // without tokenAddress + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate 
transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_19); + assert.fail(message.fail_estimateTransaction_19); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_18); + console.log(message.vali_estimateTransaction_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.incorrectSender, // incorrect sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_1); + 
assert.fail(message.fail_erc721Transfer_1); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_1); + console.log(message.vali_erc721Transfer_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.invalidSender, // invalid sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_2); + assert.fail(message.fail_erc721Transfer_2); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_2); + console.log(message.vali_erc721Transfer_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.recipient, // not added sender address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_3); + assert.fail(message.fail_erc721Transfer_3); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_3); + console.log(message.vali_erc721Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.incorrectRecipient, // incorrect recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_4); + 
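+          // All of the ERC-721 negative cases in this block vary one argument of the same
+          // encoding. A minimal sketch of the valid form, assuming the NFT ABI loaded from
+          // nftabi.json as `abi` and the sender/recipient/tokenId fixtures used elsewhere:
+          //
+          //   const erc721Interface = new ethers.utils.Interface(abi.abi);
+          //   const erc721Data = erc721Interface.encodeFunctionData('transferFrom', [
+          //     data.sender,
+          //     data.recipient,
+          //     data.tokenId,
+          //   ]);
+          //
+          // Dropping an argument or corrupting an address makes the encoder throw, which is
+          // the branch each of these tests asserts on.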
assert.fail(message.fail_erc721Transfer_4); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_4); + console.log(message.vali_erc721Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.invalidRecipient, // invalid recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_5); + assert.fail(message.fail_erc721Transfer_5); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_5); + console.log(message.vali_erc721Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, // not added recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_6); + assert.fail(message.fail_erc721Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_6); + console.log(message.vali_erc721Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.incorrectTokenId, // incorrect tokenid + ]); + + addContext(message.fail_erc721Transfer_7); + assert.fail(message.fail_erc721Transfer_7); + } catch 
(e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_erc721Transfer_7); + console.log(message.vali_erc721Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, // not added tokenid + ]); + + addContext(test, message.fail_erc721Transfer_8); + assert.fail(message.fail_erc721Transfer_8); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_8); + console.log(message.vali_erc721Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); 
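+        // The concurrent-userops tests that follow all use the same pattern: estimate one op
+        // per key, send each, then poll getUserOpReceipt until every hash resolves or a
+        // timeout passes. A compact sketch of that loop, assuming uoHashes was filled by
+        // testnetPrimeSdk.send():
+        //
+        //   const receipts = new Array(uoHashes.length).fill(null);
+        //   const deadline = Date.now() + 60000; // 1 minute
+        //   while (receipts.some((r) => r == null) && Date.now() < deadline) {
+        //     helper.wait(2000);
+        //     for (let i = 0; i < uoHashes.length; ++i) {
+        //       if (!receipts[i]) receipts[i] = await testnetPrimeSdk.getUserOpReceipt(uoHashes[i]);
+        //     }
+        //   }
+        //
+        // Bundlers typically cap unstaked senders at around 10 pending userops, and the
+        // invalid / missing concurrentUseropsCount cases simply end up sending none at all.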
+ test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) + // Staked entities can send as many userops as they want + let concurrentUseropsCount = -5; // invalid concurrent userops + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await testnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await testnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the 
sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) + // Staked entities can send as many userops as they want + let concurrentUseropsCount; // invalid concurrent userops + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await testnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await testnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the concurrent userops with non deployed address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + const provider = new providers.JsonRpcProvider(); + + try { + if ((await provider.getCode(data.eoaAddress)).length <= 2) { + addContext(test, 
message.vali_deployAddress_1); + console.log(message.vali_deployAddress_1); + return; + } + + addContext(test, message.fail_deployAddress_1); + assert.fail(message.fail_deployAddress_1); + } catch (e) { + const errorMessage = e.message; + if (errorMessage.includes(constant.invalid_network_2)) { + addContext(test, message.vali_deployAddress_2); + console.log(message.vali_deployAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployAddress_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); +}); diff --git a/test/specs/testnet/transferringFunds/oldWallet_transferringFunds.spec.js b/test/specs/testnet/transferringFunds/oldWallet_transferringFunds.spec.js new file mode 100644 index 0000000..9ee7565 --- /dev/null +++ b/test/specs/testnet/transferringFunds/oldWallet_transferringFunds.spec.js @@ -0,0 +1,3888 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { PrimeSdk, DataUtils, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers, utils, providers } from 'ethers'; +import { assert } from 'chai'; +import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; +import addContext from 'mochawesome/addContext.js'; +import customRetryAsync from '../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomIncorrectTokenAddress, + randomInvalidProviderNetwork, + randomInvalidTokenAddress, + randomOtherProviderNetwork, + randomProviderNetwork, + randomTokenAddress, +} from '../../../utils/sharedData_testnet.js'; +import helper from '../../../utils/helper.js'; +import data from '../../../data/testData.json' assert { type: 'json' }; +import abi from '../../../data/nftabi.json' assert { type: 'json' }; +import constant from '../../../data/constant.json' assert { type: 'json' }; +import message from '../../../data/messages.json' assert { type: 'json' }; + +let testnetPrimeSdk; +let etherspotWalletAddress; +let nativeAddress = null; +let dataService; +let runTest; + +describe('Perform the transaction of the tokens on the TestNet (with old wallet)', function () { + before(async function () { + var test = this; + + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + + try { + assert.strictEqual( + testnetPrimeSdk.state.EOAAddress, + data.eoaAddress, + message.vali_eoa_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // get EtherspotWallet address + try { + etherspotWalletAddress = + await testnetPrimeSdk.getCounterFactualAddress(); + + try { + assert.strictEqual( + etherspotWalletAddress, + data.sender, + message.vali_smart_address + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e.message); + const eString = e.toString(); + addContext(test, eString); + 
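+        // This before() hook mirrors the new-wallet spec: instantiate PrimeSdk against the
+        // randomly selected testnet chain, then derive the counterfactual smart-wallet
+        // address. A condensed sketch, assuming PRIVATE_KEY and BUNDLER_API_KEY are set in
+        // the environment:
+        //
+        //   const sdk = new PrimeSdk(
+        //     { privateKey: process.env.PRIVATE_KEY },
+        //     {
+        //       chainId: Number(randomChainId),
+        //       bundlerProvider: new EtherspotBundler(Number(randomChainId), process.env.BUNDLER_API_KEY),
+        //     }
+        //   );
+        //   const walletAddress = await sdk.getCounterFactualAddress();
+        //
+        // The assertions compare walletAddress against data.sender, i.e. the already deployed
+        // "old" wallet this spec exercises.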
assert.fail(message.fail_smart_address); + } + + // initializating Data service... + try { + dataService = new DataUtils(process.env.DATA_API_KEY); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_data_service); + } + }, data.retry); // Retry this async test up to 5 times + }); + + beforeEach(async function () { + var test = this; + + await customRetryAsync(async function () { + // validate the balance of the wallet + try { + let output = await dataService.getAccountBalances({ + account: data.sender, + chainId: Number(randomChainId), + }); + let native_balance; + let usdc_balance; + let native_final; + let usdc_final; + + for (let i = 0; i < output.items.length; i++) { + let tokenAddress = output.items[i].token; + if (tokenAddress === nativeAddress) { + native_balance = output.items[i].balance; + native_final = utils.formatUnits(native_balance, 18); + } else if (tokenAddress === randomTokenAddress) { + usdc_balance = output.items[i].balance; + usdc_final = utils.formatUnits(usdc_balance, 6); + } + } + + if ( + native_final > data.minimum_native_balance && + usdc_final > data.minimum_token_balance + ) { + runTest = true; + } else { + runTest = false; + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_wallet_balance); + } + }, data.retry); // Retry this async test up to 5 times + }); + + it( + 'SMOKE: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + 
message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC20 token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( 
+ randomProviderNetwork + ); + + try { + assert.isTrue( + provider._isProvider, + message.vali_erc20Transfer_provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + + try { + assert.isNotEmpty( + transactionData, + message.vali_erc20Contract_transferFrom + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_transferFrom); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: randomTokenAddress, + data: transactionData, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
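+          // estimate() resolves to an ERC-4337 UserOperation, and each field is asserted
+          // individually in this SMOKE test: sender, nonce, initCode, callData, callGasLimit,
+          // verificationGasLimit, maxFeePerGas, maxPriorityFeePerGas, paymasterAndData,
+          // preVerificationGas and signature. A hedged sketch of a spot-check on the result:
+          //
+          //   const op = await testnetPrimeSdk.estimate();
+          //   assert.isNotEmpty(op.sender);
+          //   assert.isNotEmpty(op.callData);
+          //   assert.isNotEmpty(op.preVerificationGas);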
addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the transfer ERC721 NFT token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // get erc721 Contract Interface + let erc721Interface; + let erc721Data; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + + try { + assert.isNotEmpty( + erc721Data, + message.vali_erc721Transfer_contractInterface + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let userOpsBatch; + try { + userOpsBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: 
data.nft_tokenAddress, + data: erc721Data, + }); + + try { + assert.isNotEmpty( + userOpsBatch.to[0], + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.data[0], + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + userOpsBatch.value[0], + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + let op; + try { + op = await testnetPrimeSdk.estimate(); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + // passing callGasLimit as 40000 to manually set it + let op; + try { + op = await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + try { + assert.isNotEmpty( + op.sender, + message.vali_estimateTransaction_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.nonce, + message.vali_estimateTransaction_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.initCode, + message.vali_estimateTransaction_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callData, + message.vali_estimateTransaction_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.callGasLimit, + message.vali_estimateTransaction_callGasLimit + ); + } catch 
(e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.verificationGasLimit, + message.vali_estimateTransaction_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxFeePerGas, + message.vali_estimateTransaction_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.maxPriorityFeePerGas, + message.vali_estimateTransaction_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.paymasterAndData, + message.vali_estimateTransaction_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.preVerificationGas, + message.vali_estimateTransaction_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + op.signature, + message.vali_estimateTransaction_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + let uoHash; + try { + uoHash = await testnetPrimeSdk.send(op); + + try { + assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'SMOKE: Perform the concurrent userops with valid details on the ' + + randomChainName + + ' network', + async function () { + // NOTE: assume the sender wallet is deployed + + var test = this; + if (runTest) { + await customRetryAsync(async function () { + helper.wait(data.mediumTimeout); + + const provider = new providers.JsonRpcProvider(); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + + try { + assert.isNotEmpty( + transactionBatch.to, + message.vali_addTransaction_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + transactionBatch.data, + message.vali_addTransaction_data + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + 
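+              // soft-assert that the batched native transfer carries a value entry before estimating the concurrent userops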
assert.isNotEmpty( + transactionBatch.value, + message.vali_addTransaction_value + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + + try { + assert.isNotEmpty(balance, message.vali_getBalance_balance); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) + // Staked entities can send as many userops as they want + let concurrentUseropsCount = 1; + const userops = []; + const uoHashes = []; + + try { + while (--concurrentUseropsCount >= 0) { + const op = await testnetPrimeSdk.estimate({ + key: concurrentUseropsCount, + }); + userops.push(op); + } + + console.log('Sending userops...'); + for (const op of userops) { + const uoHash = await testnetPrimeSdk.send(op); + uoHashes.push(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + try { + console.log('Waiting for transactions...'); + const userOpsReceipts = new Array(uoHashes.length).fill(null); + const timeout = Date.now() + 60000; // 1 minute timeout + while ( + userOpsReceipts.some((receipt) => receipt == null) && + Date.now() < timeout + ) { + helper.wait(2000); + for (let i = 0; i < uoHashes.length; ++i) { + if (userOpsReceipts[i]) continue; + const uoHash = uoHashes[i]; + userOpsReceipts[i] = + await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } + + if (userOpsReceipts.some((receipt) => receipt != null)) { + for (const uoReceipt of userOpsReceipts) { + if (!uoReceipt) continue; + addContext(test, message.vali_submitTransaction_1); + console.log(message.vali_submitTransaction_1); + } + } else { + addContext(test, message.vali_submitTransaction_2); + console.log(message.vali_submitTransaction_2); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getUserOpReceipt_1); + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: 
ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_9); + console.log(message.vali_estimateTransaction_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to 
the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseUnits(data.invalidValue), // invalid value + }); + + addContext(test, message.fail_estimateTransaction_11); + assert.fail(message.fail_estimateTransaction_11); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_estimateTransaction_10); + console.log(message.vali_estimateTransaction_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_11); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseUnits(data.smallValue), // very small value + }); + + addContext(test, message.fail_estimateTransaction_12); + assert.fail(message.fail_estimateTransaction_12); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_estimateTransaction_11); + console.log(message.vali_estimateTransaction_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_12); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, 
message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.incorrectRecipient, // incorrect to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_9); + assert.fail(message.fail_estimateTransaction_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. 
missing character while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: data.invalidRecipient, // invalid to address + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_10); + assert.fail(message.fail_estimateTransaction_10); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_8); + console.log(message.vali_estimateTransaction_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + xit( + 'REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + // passing callGasLimit as 40000 to manually set it + try { + await testnetPrimeSdk.estimate({ callGasLimit: 40000 }); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.empty_batch) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.nativeTransaction_insufficientBalance); + console.warn(message.nativeTransaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk 
details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomInvalidProviderNetwork // invalid provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider(); // without provider + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the ' + + 
randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomOtherProviderNetwork // other provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomIncorrectTokenAddress, // incorrect token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. 
missing character details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomInvalidTokenAddress, // invalid token address + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + try { + new ethers.Contract(null, ERC20_ABI, provider); // null token address + + addContext(test, message.fail_erc20Transfer_3); + assert.fail(message.fail_erc20Transfer_3); + } catch (e) { + if (e.reason === constant.contract_address_2) { + addContext(test, message.vali_erc20Transfer_3); + console.log(message.vali_erc20Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
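+              // provider creation is a prerequisite for this negative test, so treat any error here as a hard failure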
assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transferr', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_4); + assert.fail(message.fail_erc20Transfer_4); + } catch (e) { + if (e.reason === constant.no_function) { + addContext(test, message.vali_erc20Transfer_4); + console.log(message.vali_erc20Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.invalidValue, + data.erc20_usdc_decimal + ), // invalid value + ]); + + addContext(test, message.fail_erc20Transfer_5); + assert.fail(message.fail_erc20Transfer_5); + } catch (e) { + if (e.reason === constant.invalid_value_1) { + addContext(test, message.vali_erc20Transfer_5); + console.log(message.vali_erc20Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + 
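+              // build an ethers Contract from the shared token address and ERC20 ABI so the transfer calldata can be encoded below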
try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits(data.smallValue, data.erc20_usdc_decimal), // very small value + ]); + + addContext(test, message.fail_erc20Transfer_6); + assert.fail(message.fail_erc20Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_2) { + addContext(test, message.vali_erc20Transfer_6); + console.log(message.vali_erc20Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ]); + + addContext(test, message.fail_erc20Transfer_7); + assert.fail(message.fail_erc20Transfer_7); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_7); + console.log(message.vali_erc20Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.incorrectRecipient, // incorrect recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_8); + assert.fail(message.fail_erc20Transfer_8); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc20Transfer_8); + console.log(message.vali_erc20Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. missing character while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.invalidRecipient, // invalid recipient address + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_9); + assert.fail(message.fail_erc20Transfer_9); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc20Transfer_9); + console.log(message.vali_erc20Transfer_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_9); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + + addContext(test, message.fail_erc20Transfer_10); + assert.fail(message.fail_erc20Transfer_10); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc20Transfer_10); + console.log(message.vali_erc20Transfer_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_10); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomIncorrectTokenAddress, // Incorrect Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_16); + assert.fail(message.fail_estimateTransaction_16); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_6)) { + addContext(test, message.vali_estimateTransaction_15); + console.log(message.vali_estimateTransaction_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_16); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } 
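+
+        /*
+         * The positive transfer tests in this spec all follow the same Prime SDK flow.
+         * A minimal sketch (assuming the shared `testnetPrimeSdk` instance and the
+         * `ethers` import used throughout this file; values are illustrative):
+         *
+         *   await testnetPrimeSdk.clearUserOpsFromBatch();                  // start from an empty batch
+         *   await testnetPrimeSdk.addUserOpsToBatch({
+         *     to: data.recipient,
+         *     value: ethers.utils.parseEther(data.value),                   // native-token transfer
+         *   });
+         *   const op = await testnetPrimeSdk.estimate();                    // fee data for the UserOp
+         *   const uoHash = await testnetPrimeSdk.send(op);                  // submit to the bundler
+         *   const receipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); // optionally poll for inclusion
+         */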
+ ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. missing character while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: randomInvalidTokenAddress, // Invalid Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_17); + assert.fail(message.fail_estimateTransaction_17); + } catch (e) { + let error = e.reason; + if (error.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_16); + console.log(message.vali_estimateTransaction_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_17); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); 
+ } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + to: null, // Null Token Address + data: transactionData, + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_18); + assert.fail(message.fail_estimateTransaction_18); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_17); + console.log(message.vali_estimateTransaction_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_18); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + let transactionData; + try { + transactionData = erc20Instance.interface.encodeFunctionData( + 'transfer', + [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ] + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + try { + await testnetPrimeSdk.addUserOpsToBatch({ + data: transactionData, // without tokenAddress + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate 
transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_19); + assert.fail(message.fail_estimateTransaction_19); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_estimateTransaction_18); + console.log(message.vali_estimateTransaction_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_19); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get the respective provider details + let provider; + try { + provider = new ethers.providers.JsonRpcProvider( + randomProviderNetwork + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_provider); + } + + // get erc20 Contract Interface + let erc20Instance; + try { + erc20Instance = new ethers.Contract( + randomTokenAddress, + ERC20_ABI, + provider + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Transfer_contractInterface); + } + + // get transferFrom encoded data + try { + erc20Instance.interface.encodeFunctionData('transfer', [ + data.recipient, + ethers.utils.parseUnits( + data.erc20_value, + data.erc20_usdc_decimal + ), + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc20Contract_decimals); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc20Transaction_insufficientBalance); + console.warn(message.erc20Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.incorrectSender, // incorrect sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_1); + 
assert.fail(message.fail_erc721Transfer_1); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_1); + console.log(message.vali_erc721Transfer_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_1); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.invalidSender, // invalid sender address + data.recipient, + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_2); + assert.fail(message.fail_erc721Transfer_2); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_2); + console.log(message.vali_erc721Transfer_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_2); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.recipient, // not added sender address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_3); + assert.fail(message.fail_erc721Transfer_3); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_3); + console.log(message.vali_erc721Transfer_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_3); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.incorrectRecipient, // incorrect recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_4); + 
assert.fail(message.fail_erc721Transfer_4); + } catch (e) { + if (e.reason.includes(constant.invalid_address_6)) { + addContext(test, message.vali_erc721Transfer_4); + console.log(message.vali_erc721Transfer_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_4); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.invalidRecipient, // invalid recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_5); + assert.fail(message.fail_erc721Transfer_5); + } catch (e) { + if (e.reason.includes(constant.invalid_address_4)) { + addContext(test, message.vali_erc721Transfer_5); + console.log(message.vali_erc721Transfer_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_5); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, // not added recipient address + data.tokenId, + ]); + + addContext(test, message.fail_erc721Transfer_6); + assert.fail(message.fail_erc721Transfer_6); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_6); + console.log(message.vali_erc721Transfer_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_6); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.incorrectTokenId, // incorrect tokenid + ]); + + addContext(message.fail_erc721Transfer_7); + assert.fail(message.fail_erc721Transfer_7); + } catch 
(e) { + if (e.reason === constant.invalid_bignumber_1) { + addContext(test, message.vali_erc721Transfer_7); + console.log(message.vali_erc721Transfer_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_7); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, // not added tokenid + ]); + + addContext(test, message.fail_erc721Transfer_8); + assert.fail(message.fail_erc721Transfer_8); + } catch (e) { + if (e.reason === constant.invalid_value_4) { + addContext(test, message.vali_erc721Transfer_8); + console.log(message.vali_erc721Transfer_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_8); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); + test.skip(); + } + } + ); + + it( + 'REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the ' + + randomChainName + + ' network', + async function () { + var test = this; + if (runTest) { + await customRetryAsync(async function () { + // get erc721 Contract Interface + let erc721Interface; + try { + erc721Interface = new ethers.utils.Interface(abi.abi); + + erc721Interface.encodeFunctionData('transferFrom', [ + data.sender, + data.recipient, + data.tokenId, + ]); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_erc721Transfer_contractInterface); + } + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // get balance of the account address + try { + await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch + try { + await testnetPrimeSdk.estimate(); + + addContext(test, message.fail_estimateTransaction_13); + assert.fail(message.fail_estimateTransaction_13); + } catch (e) { + if (e.message === constant.invalid_parameter) { + addContext(test, message.vali_estimateTransaction_12); + console.log(message.vali_estimateTransaction_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_13); + } + } + }, data.retry); // Retry this async test up to 5 times + } else { + addContext(test, message.erc721Transaction_insufficientBalance); + console.warn(message.erc721Transaction_insufficientBalance); 
+        test.skip();
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the ' +
+      randomChainName +
+      ' network',
+    async function () {
+      // NOTE: assume the sender wallet is deployed
+
+      var test = this;
+      if (runTest) {
+        await customRetryAsync(async function () {
+          const provider = new providers.JsonRpcProvider();
+
+          // clear the transaction batch
+          try {
+            await testnetPrimeSdk.clearUserOpsFromBatch();
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_clearTransaction_1);
+          }
+
+          // add transactions to the batch
+          let transactionBatch;
+          try {
+            transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({
+              to: data.recipient,
+              value: ethers.utils.parseEther(data.value),
+            });
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_addTransaction_1);
+          }
+
+          // get balance of the account address
+          let balance;
+          try {
+            balance = await testnetPrimeSdk.getNativeBalance();
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_getBalance_1);
+          }
+
+          // Note that usually Bundlers do not allow sending more than 10 concurrent userops from unstaked entities (wallets, factories, paymaster)
+          // Staked entities can send as many userops as they want
+          let concurrentUseropsCount = -5; // invalid concurrent userops count
+          const userops = [];
+          const uoHashes = [];
+
+          try {
+            while (--concurrentUseropsCount >= 0) {
+              const op = await testnetPrimeSdk.estimate({
+                key: concurrentUseropsCount,
+              });
+              userops.push(op);
+            }
+
+            console.log('Sending userops...');
+            for (const op of userops) {
+              const uoHash = await testnetPrimeSdk.send(op);
+              uoHashes.push(uoHash);
+            }
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            if (eString === 'Error') {
+              console.warn(message.skip_transaction_error);
+              addContext(test, message.skip_transaction_error);
+              test.skip();
+            } else {
+              addContext(test, eString);
+              assert.fail(message.fail_submitTransaction_1);
+            }
+          }
+
+          try {
+            console.log('Waiting for transactions...');
+            const userOpsReceipts = new Array(uoHashes.length).fill(null);
+            const timeout = Date.now() + 60000; // 1 minute timeout
+            while (
+              userOpsReceipts.some((receipt) => receipt == null) &&
+              Date.now() < timeout
+            ) {
+              helper.wait(2000);
+              for (let i = 0; i < uoHashes.length; ++i) {
+                if (userOpsReceipts[i]) continue;
+                const uoHash = uoHashes[i];
+                userOpsReceipts[i] =
+                  await testnetPrimeSdk.getUserOpReceipt(uoHash);
+              }
+            }
+
+            if (userOpsReceipts.some((receipt) => receipt != null)) {
+              for (const uoReceipt of userOpsReceipts) {
+                if (!uoReceipt) continue;
+                addContext(test, message.vali_submitTransaction_1);
+                console.log(message.vali_submitTransaction_1);
+              }
+            } else {
+              addContext(test, message.vali_submitTransaction_2);
+              console.log(message.vali_submitTransaction_2);
+            }
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_getUserOpReceipt_1);
+          }
+        }, data.retry); // Retry this async test up to 5 times
+      } else {
+        addContext(test, message.nativeTransaction_insufficientBalance);
+        console.warn(message.nativeTransaction_insufficientBalance);
+        test.skip();
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the ' +
+      randomChainName +
+      ' network',
+    async function () {
+      // NOTE: assume the sender wallet is deployed
+
+      var test = this;
+      if (runTest) {
+        await customRetryAsync(async function () {
+          const provider = new providers.JsonRpcProvider();
+
+          // clear the transaction batch
+          try {
+            await testnetPrimeSdk.clearUserOpsFromBatch();
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_clearTransaction_1);
+          }
+
+          // add transactions to the batch
+          let transactionBatch;
+          try {
+            transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({
+              to: data.recipient,
+              value: ethers.utils.parseEther(data.value),
+            });
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_addTransaction_1);
+          }
+
+          // get balance of the account address
+          let balance;
+          try {
+            balance = await testnetPrimeSdk.getNativeBalance();
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_getBalance_1);
+          }
+
+          // Note that usually Bundlers do not allow sending more than 10 concurrent userops from unstaked entities (wallets, factories, paymaster)
+          // Staked entities can send as many userops as they want
+          let concurrentUseropsCount; // concurrent userops count is intentionally left undefined
+          const userops = [];
+          const uoHashes = [];
+
+          try {
+            while (--concurrentUseropsCount >= 0) {
+              const op = await testnetPrimeSdk.estimate({
+                key: concurrentUseropsCount,
+              });
+              userops.push(op);
+            }
+
+            console.log('Sending userops...');
+            for (const op of userops) {
+              const uoHash = await testnetPrimeSdk.send(op);
+              uoHashes.push(uoHash);
+            }
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            if (eString === 'Error') {
+              console.warn(message.skip_transaction_error);
+              addContext(test, message.skip_transaction_error);
+              test.skip();
+            } else {
+              addContext(test, eString);
+              assert.fail(message.fail_submitTransaction_1);
+            }
+          }
+
+          try {
+            console.log('Waiting for transactions...');
+            const userOpsReceipts = new Array(uoHashes.length).fill(null);
+            const timeout = Date.now() + 60000; // 1 minute timeout
+            while (
+              userOpsReceipts.some((receipt) => receipt == null) &&
+              Date.now() < timeout
+            ) {
+              helper.wait(2000);
+              for (let i = 0; i < uoHashes.length; ++i) {
+                if (userOpsReceipts[i]) continue;
+                const uoHash = uoHashes[i];
+                userOpsReceipts[i] =
+                  await testnetPrimeSdk.getUserOpReceipt(uoHash);
+              }
+            }
+
+            if (userOpsReceipts.some((receipt) => receipt != null)) {
+              for (const uoReceipt of userOpsReceipts) {
+                if (!uoReceipt) continue;
+                addContext(test, message.vali_submitTransaction_1);
+                console.log(message.vali_submitTransaction_1);
+              }
+            } else {
+              addContext(test, message.vali_submitTransaction_2);
+              console.log(message.vali_submitTransaction_2);
+            }
+          } catch (e) {
+            console.error(e);
+            const eString = e.toString();
+            addContext(test, eString);
+            assert.fail(message.fail_getUserOpReceipt_1);
+          }
+        }, data.retry); // Retry this async test up to 5 times
+      } else {
+        addContext(test, message.nativeTransaction_insufficientBalance);
+        console.warn(message.nativeTransaction_insufficientBalance);
+        test.skip();
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Perform the concurrent userops with non deployed address on the ' +
+      randomChainName +
+      ' network',
+    async function () {
+      var test = this;
+      if (runTest) {
+        await customRetryAsync(async function () {
+          const provider = new providers.JsonRpcProvider();
+
+          try {
+            if ((await provider.getCode(data.eoaAddress)).length <= 2) {
+              addContext(test, message.vali_deployAddress_1);
+              console.log(message.vali_deployAddress_1);
+              return;
+            }
+
+            addContext(test, message.fail_deployAddress_1);
+            assert.fail(message.fail_deployAddress_1);
+          } catch (e) {
+            const errorMessage = e.message;
+            if (errorMessage.includes(constant.invalid_network_2)) {
+              addContext(test, message.vali_deployAddress_2);
+              console.log(message.vali_deployAddress_2);
+            } else {
+              console.error(e);
+              const eString = e.toString();
+              addContext(test, eString);
+              assert.fail(message.fail_deployAddress_1);
+            }
+          }
+        }, data.retry); // Retry this async test up to 5 times
+      } else {
+        addContext(test, message.nativeTransaction_insufficientBalance);
+        console.warn(message.nativeTransaction_insufficientBalance);
+        test.skip();
+      }
+    }
+  );
+});
diff --git a/test/specs/testnet/transferringFunds/sepolia.spec.js b/test/specs/testnet/transferringFunds/sepolia.spec.js
deleted file mode 100644
index 084271c..0000000
--- a/test/specs/testnet/transferringFunds/sepolia.spec.js
+++ /dev/null
@@ -1,3680 +0,0 @@
-import * as dotenv from 'dotenv';
-dotenv.config(); // init dotenv
-import { PrimeSdk, DataUtils } from '@etherspot/prime-sdk';
-import { ethers, utils, providers } from 'ethers';
-import { assert } from 'chai';
-import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js';
-import addContext from 'mochawesome/addContext.js';
-import customRetryAsync from '../../../utils/baseTest.js';
-import helper from '../../../utils/helper.js';
-import data from '../../../data/testData.json' assert { type: 'json' };
-import abi from '../../../data/nftabi.json' assert { type: 'json' };
-import constant from '../../../data/constant.json' assert { type: 'json' };
-import message from '../../../data/messages.json' assert { type: 'json' };
-
-let sepoliaTestNetSdk;
-let sepoliaEtherspotWalletAddress;
-let sepoliaNativeAddress = null;
-let sepoliaDataService;
-let runTest;
-
-describe('The PrimeSDK, when transfer a token with sepolia network on the TestNet', function () {
-  before(async function () {
-    var test = this;
-
-    await customRetryAsync(async function () {
-      helper.wait(data.mediumTimeout);
-
-      // initializating sdk
-      try {
-        sepoliaTestNetSdk = new PrimeSdk(
-          { privateKey: process.env.PRIVATE_KEY },
-          {
-            chainId: Number(data.sepolia_chainid),
-          }
-        );
-
-        try {
-          assert.strictEqual(
-            sepoliaTestNetSdk.state.EOAAddress,
-            data.eoaAddress,
-            message.vali_eoa_address
-          );
-        } catch (e) {
-          console.error(e);
-          const eString = e.toString();
-          addContext(test, eString);
-        }
-      } catch (e) {
-        console.error(e);
-        const eString = e.toString();
-        addContext(test, eString);
-        assert.fail(message.fail_sdk_initialize);
-      }
-
-      // get EtherspotWallet address
-      try {
-        sepoliaEtherspotWalletAddress =
-          await sepoliaTestNetSdk.getCounterFactualAddress();
-
-        try {
-          assert.strictEqual(
-            sepoliaEtherspotWalletAddress,
-            data.sender,
-            message.vali_smart_address
-          );
-        } catch (e) {
-          console.error(e);
-          const eString = e.toString();
-          addContext(test, eString);
-        }
-      } catch (e) {
-        console.error(e.message);
-        const eString = e.toString();
-        addContext(test, eString);
-        assert.fail(message.fail_smart_address);
-      }
-
-      // initializating Data service...
- try { - sepoliaDataService = new DataUtils(process.env.DATA_API_KEY); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_data_service); - } - - // validate the balance of the wallet - try { - let output = await sepoliaDataService.getAccountBalances({ - account: data.sender, - chainId: data.sepolia_chainid, - }); - let native_balance; - let usdc_balance; - let native_final; - let usdc_final; - - for (let i = 0; i < output.items.length; i++) { - let tokenAddress = output.items[i].token; - if (tokenAddress === sepoliaNativeAddress) { - native_balance = output.items[i].balance; - native_final = utils.formatUnits(native_balance, 18); - } else if (tokenAddress === data.tokenAddress_sepoliaUSDC) { - usdc_balance = output.items[i].balance; - usdc_final = utils.formatUnits(usdc_balance, 6); - } - } - - if ( - native_final > data.minimum_native_balance && - usdc_final > data.minimum_token_balance - ) { - runTest = true; - } else { - runTest = false; - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_wallet_balance); - } - }, data.retry); // Retry this async test up to 5 times - }); - - it('SMOKE: Perform the transfer native token with valid details on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - 
const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC20 token with valid details on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - - try { - assert.isTrue( - provider._isProvider, - message.vali_erc20Transfer_provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - 
erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - - try { - assert.isNotEmpty(decimals, message.vali_erc20Contract_decimals); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - - try { - assert.isNotEmpty( - transactionData, - message.vali_erc20Contract_transferFrom - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_transferFrom); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: transactionData, - }); - - try { - assert.isNotEmpty(userOpsBatch.to, message.vali_addTransaction_to); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the transfer ERC721 NFT token with valid details on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // get erc721 Contract Interface - let erc721Interface; - let erc721Data; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Data = erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - - try { - assert.isNotEmpty( - erc721Data, - message.vali_erc721Transfer_contractInterface - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let userOpsBatch; - try { - userOpsBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.nft_tokenAddress, - data: erc721Data, - }); - - try { - assert.isNotEmpty( - userOpsBatch.to[0], - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.data[0], - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - userOpsBatch.value[0], - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async 
test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - xit('SMOKE: Perform the transfer native token by passing callGasLimit with valid details on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - // passing callGasLimit as 40000 to manually set it - let op; - try { - op = await sepoliaTestNetSdk.estimate({ callGasLimit: 40000 }); - - try { - assert.isNotEmpty( - op.sender, - message.vali_estimateTransaction_sender - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty(op.nonce, message.vali_estimateTransaction_nonce); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.initCode, - message.vali_estimateTransaction_initCode - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callData, - message.vali_estimateTransaction_callData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.callGasLimit, - message.vali_estimateTransaction_callGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.verificationGasLimit, - message.vali_estimateTransaction_verificationGasLimit - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxFeePerGas, - 
message.vali_estimateTransaction_maxFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.maxPriorityFeePerGas, - message.vali_estimateTransaction_maxPriorityFeePerGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.paymasterAndData, - message.vali_estimateTransaction_paymasterAndData - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.preVerificationGas, - message.vali_estimateTransaction_preVerificationGas - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - op.signature, - message.vali_estimateTransaction_signature - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_1); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - - try { - assert.isNotEmpty(uoHash, message.vali_submitTransaction_uoHash); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('SMOKE: Perform the concurrent userops with valid details on the sepolia network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - helper.wait(data.mediumTimeout); - - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - - try { - assert.isNotEmpty( - transactionBatch.to, - message.vali_addTransaction_to - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.data, - message.vali_addTransaction_data - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - try { - assert.isNotEmpty( - transactionBatch.value, - message.vali_addTransaction_value - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - - try { - assert.isNotEmpty(balance, message.vali_getBalance_balance); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = 1; - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await sepoliaTestNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await sepoliaTestNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_submitTransaction_1); - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await sepoliaTestNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the incorrect To Address while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, 
message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_9); - console.log(message.vali_estimateTransaction_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the invalid Value while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.invalidValue), // invalid value - }); - - addContext(test, message.fail_estimateTransaction_11); - assert.fail(message.fail_estimateTransaction_11); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_estimateTransaction_10); - console.log(message.vali_estimateTransaction_10); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_11); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - 
console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token with the very small Value while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseUnits(data.smallValue), // very small value - }); - - addContext(test, message.fail_estimateTransaction_12); - assert.fail(message.fail_estimateTransaction_12); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_estimateTransaction_11); - console.log(message.vali_estimateTransaction_11); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_12); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer native token without adding transaction to the batch while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the incorrect To Address while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.incorrectRecipient, // incorrect to address - value: 
ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await sepoliaTestNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_9); - assert.fail(message.fail_estimateTransaction_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit with the invalid To Address i.e. missing character while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.invalidRecipient, // invalid to address - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await sepoliaTestNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_10); - assert.fail(message.fail_estimateTransaction_10); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_8); - console.log(message.vali_estimateTransaction_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - xit('REGRESSION: Perform the transfer native token by passing callGasLimit without adding transaction to the batch while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // clear the transaction batch - try { 
- await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - // passing callGasLimit as 40000 to manually set it - try { - await sepoliaTestNetSdk.estimate({ callGasLimit: 40000 }); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.empty_batch) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid provider netowrk details while Getting the Decimal from ERC20 Contract on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.invalidProviderNetwork_sepolia // invalid provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without provider netowrk details while Getting the Decimal from ERC20 Contract on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider(); // without provider - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - 
erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_14); - assert.fail(message.fail_estimateTransaction_14); - } catch (e) { - if (e.reason === constant.invalid_network_2) { - addContext(test, message.vali_estimateTransaction_13); - console.log(message.vali_estimateTransaction_13); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_14); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with other provider netowrk details while Getting the Decimal from ERC20 Contract on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.otherProviderNetwork_sepolia // other provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_estimateTransaction_15); - assert.fail(message.fail_estimateTransaction_15); - } catch (e) { - let error = e.message; - if (error.includes(constant.invalid_value_3)) { - addContext(test, message.vali_estimateTransaction_14); - console.log(message.vali_estimateTransaction_14); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_15); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect Token Address details while Getting the Decimal from ERC20 Contract on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.incorrectTokenAddress_sepoliaUSDC, // incorrect token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(message.fail_erc20Transfer_1); - assert.fail(message.fail_erc20Transfer_1); - } catch (e) { - if (e.reason === constant.invalid_address_6) { - addContext(test, message.vali_erc20Transfer_1); - console.log(message.vali_erc20Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid Token Address i.e. missing character details while Getting the Decimal from ERC20 Contract on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.invalidTokenAddress_sepoliaUSDC, // invalid token address - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - - addContext(test, message.fail_erc20Transfer_2); - assert.fail(message.fail_erc20Transfer_2); - } catch (e) { - if (e.reason === constant.invalid_address_4) { - addContext(test, message.vali_erc20Transfer_2); - console.log(message.vali_erc20Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with null Token Address details while Getting the Decimal from ERC20 Contract on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - try { - new ethers.Contract(null, ERC20_ABI, provider); // null token address - - addContext(test, message.fail_erc20Transfer_3); - assert.fail(message.fail_erc20Transfer_3); - } catch (e) { - if (e.reason === constant.contract_address_2) { - addContext(test, message.vali_erc20Transfer_3); - console.log(message.vali_erc20Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_3); - } - } - }, 
data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect transfer method name while Getting the transferFrom encoded data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transferr', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_4); - assert.fail(message.fail_erc20Transfer_4); - } catch (e) { - if (e.reason === constant.no_function) { - addContext(test, message.vali_erc20Transfer_4); - console.log(message.vali_erc20Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid value while Getting the transferFrom encoded data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.invalidValue, decimals), // invalid value - ]); - - addContext(test, 
message.fail_erc20Transfer_5); - assert.fail(message.fail_erc20Transfer_5); - } catch (e) { - if (e.reason === constant.invalid_value_1) { - addContext(test, message.vali_erc20Transfer_5); - console.log(message.vali_erc20Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with very small value while Getting the transferFrom encoded data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.smallValue, decimals), // very small value - ]); - - addContext(test, message.fail_erc20Transfer_6); - assert.fail(message.fail_erc20Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_2) { - addContext(test, message.vali_erc20Transfer_6); - console.log(message.vali_erc20Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without value while Getting the transferFrom encoded data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - try { - await erc20Instance.functions.decimals(); - } catch (e) { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ]); - - addContext(test, message.fail_erc20Transfer_7); - assert.fail(message.fail_erc20Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_7); - console.log(message.vali_erc20Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with incorrect recipient while Getting the transferFrom encoded data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.incorrectRecipient, // incorrect recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_8); - assert.fail(message.fail_erc20Transfer_8); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc20Transfer_8); - console.log(message.vali_erc20Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with invalid recipient i.e. 
missing character while Getting the transferFrom encoded data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.invalidRecipient, // invalid recipient address - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_9); - assert.fail(message.fail_erc20Transfer_9); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc20Transfer_9); - console.log(message.vali_erc20Transfer_9); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_9); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without recipient while Getting the transferFrom encoded data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - - addContext(test, message.fail_erc20Transfer_10); - assert.fail(message.fail_erc20Transfer_10); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc20Transfer_10); - console.log(message.vali_erc20Transfer_10); - } else { - 
console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_10); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the incorrect Token Address while adding transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.incorrectTokenAddress_sepoliaUSDC, // Incorrect Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_16); - assert.fail(message.fail_estimateTransaction_16); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_6)) { - addContext(test, message.vali_estimateTransaction_15); - console.log(message.vali_estimateTransaction_15); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_16); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the invalid Token Address i.e. 
missing character while adding transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.invalidTokenAddress_sepoliaUSDC, // Invalid Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_17); - assert.fail(message.fail_estimateTransaction_17); - } catch (e) { - let error = e.reason; - if (error.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_16); - console.log(message.vali_estimateTransaction_16); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_17); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token with the null Token Address while adding transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - 
} catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: null, // Null Token Address - data: transactionData, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_18); - assert.fail(message.fail_estimateTransaction_18); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_17); - console.log(message.vali_estimateTransaction_17); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_18); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without Token Address while adding transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - let transactionData; - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = 
e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - try { - await sepoliaTestNetSdk.addUserOpsToBatch({ - data: transactionData, // without tokenAddress - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_19); - assert.fail(message.fail_estimateTransaction_19); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_estimateTransaction_18); - console.log(message.vali_estimateTransaction_18); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_19); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC20 token without adding transaction to the batch while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_provider); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Transfer_contractInterface); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // get transferFrom encoded data - try { - erc20Instance.interface.encodeFunctionData('transfer', [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc20Contract_decimals); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // estimate transactions added to the batch - try { - await sepoliaTestNetSdk.estimate(); - - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc20Transaction_insufficientBalance); - console.warn(message.erc20Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Sender Address while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.incorrectSender, // incorrect sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_1); - assert.fail(message.fail_erc721Transfer_1); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_1); - console.log(message.vali_erc721Transfer_1); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Sender Address i.e. missing character while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.invalidSender, // invalid sender address - data.recipient, - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_2); - assert.fail(message.fail_erc721Transfer_2); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_2); - console.log(message.vali_erc721Transfer_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_2); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Sender Address while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.recipient, // not added sender address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_3); - assert.fail(message.fail_erc721Transfer_3); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_3); - console.log(message.vali_erc721Transfer_3); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_3); - } - } - }, data.retry); // Retry this async test up to 5 times - 
} else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect Recipient Address while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.incorrectRecipient, // incorrect recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_4); - assert.fail(message.fail_erc721Transfer_4); - } catch (e) { - if (e.reason.includes(constant.invalid_address_6)) { - addContext(test, message.vali_erc721Transfer_4); - console.log(message.vali_erc721Transfer_4); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_4); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with invalid Recipient Address i.e. missing character while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.invalidRecipient, // invalid recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_5); - assert.fail(message.fail_erc721Transfer_5); - } catch (e) { - if (e.reason.includes(constant.invalid_address_4)) { - addContext(test, message.vali_erc721Transfer_5); - console.log(message.vali_erc721Transfer_5); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_5); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without Recipient Address while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, // not added recipient address - data.tokenId, - ]); - - addContext(test, message.fail_erc721Transfer_6); - assert.fail(message.fail_erc721Transfer_6); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_6); - console.log(message.vali_erc721Transfer_6); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_6); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - 
console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token with incorrect tokenId while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.incorrectTokenId, // incorrect tokenid - ]); - - addContext(message.fail_erc721Transfer_7); - assert.fail(message.fail_erc721Transfer_7); - } catch (e) { - if (e.reason === constant.invalid_bignumber_1) { - addContext(test, message.vali_erc721Transfer_7); - console.log(message.vali_erc721Transfer_7); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_7); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT token without tokenId while creating the NFT Data on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, // not added tokenid - ]); - - addContext(test, message.fail_erc721Transfer_8); - assert.fail(message.fail_erc721Transfer_8); - } catch (e) { - if (e.reason === constant.invalid_value_4) { - addContext(test, message.vali_erc721Transfer_8); - console.log(message.vali_erc721Transfer_8); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_8); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the transfer ERC721 NFT Token without adding transaction to the batch while estimate the added transactions to the batch on the sepolia network', async function () { - var test = this; - if (runTest) { - await customRetryAsync(async function () { - // get erc721 Contract Interface - let erc721Interface; - try { - erc721Interface = new ethers.utils.Interface(abi.abi); - - erc721Interface.encodeFunctionData('transferFrom', [ - data.sender, - data.recipient, - data.tokenId, - ]); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_erc721Transfer_contractInterface); - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // get balance of the account address - try { - await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // estimate transactions added to the batch - try { - await 
sepoliaTestNetSdk.estimate(); - - addContext(test, message.fail_estimateTransaction_13); - assert.fail(message.fail_estimateTransaction_13); - } catch (e) { - if (e.message === constant.invalid_parameter) { - addContext(test, message.vali_estimateTransaction_12); - console.log(message.vali_estimateTransaction_12); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_estimateTransaction_13); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.erc721Transaction_insufficientBalance); - console.warn(message.erc721Transaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with invalid concurrentUseropsCount on the sepolia network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount = -5; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await sepoliaTestNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await sepoliaTestNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - addContext(test, message.fail_submitTransaction_1); - assert.fail(message.fail_submitTransaction_1); - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await sepoliaTestNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, 
eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops without concurrentUseropsCount on the sepolia network', async function () { - // NOTE: assume the sender wallet is deployed - - var test = this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_clearTransaction_1); - } - - // add transactions to the batch - let transactionBatch; - try { - transactionBatch = await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_addTransaction_1); - } - - // get balance of the account address - let balance; - try { - balance = await sepoliaTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getBalance_1); - } - - // Note that usually Bundlers do not allow sending more than 10 concurrent userops from an unstaked entites (wallets, factories, paymaster) - // Staked entities can send as many userops as they want - let concurrentUseropsCount; // invalid concurrent userops - const userops = []; - const uoHashes = []; - - try { - while (--concurrentUseropsCount >= 0) { - const op = await sepoliaTestNetSdk.estimate({ - key: concurrentUseropsCount, - }); - userops.push(op); - } - - console.log('Sending userops...'); - for (const op of userops) { - const uoHash = await sepoliaTestNetSdk.send(op); - uoHashes.push(uoHash); - } - } catch (e) { - addContext(test, message.fail_submitTransaction_1); - assert.fail(message.fail_submitTransaction_1); - } - - try { - console.log('Waiting for transactions...'); - const userOpsReceipts = new Array(uoHashes.length).fill(null); - const timeout = Date.now() + 60000; // 1 minute timeout - while ( - userOpsReceipts.some((receipt) => receipt == null) && - Date.now() < timeout - ) { - helper.wait(2000); - for (let i = 0; i < uoHashes.length; ++i) { - if (userOpsReceipts[i]) continue; - const uoHash = uoHashes[i]; - userOpsReceipts[i] = - await sepoliaTestNetSdk.getUserOpReceipt(uoHash); - } - } - - if (userOpsReceipts.some((receipt) => receipt != null)) { - for (const uoReceipt of userOpsReceipts) { - if (!uoReceipt) continue; - addContext(test, message.vali_submitTransaction_1); - console.log(message.vali_submitTransaction_1); - } - } else { - addContext(test, message.vali_submitTransaction_2); - console.log(message.vali_submitTransaction_2); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_getUserOpReceipt_1); - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); - - it('REGRESSION: Perform the concurrent userops with non deployed address on the sepolia network', async function () { - var test = 
this; - if (runTest) { - await customRetryAsync(async function () { - const provider = new providers.JsonRpcProvider(); - - try { - if ((await provider.getCode(data.eoaAddress)).length <= 2) { - addContext(test, message.vali_deployAddress_1); - console.log(message.vali_deployAddress_1); - return; - } - - addContext(test, message.fail_deployAddress_1); - assert.fail(message.fail_deployAddress_1); - } catch (e) { - const errorMessage = e.message; - if (errorMessage.includes(constant.invalid_network_2)) { - addContext(test, message.vali_deployAddress_2); - console.log(message.vali_deployAddress_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_deployAddress_1); - } - } - }, data.retry); // Retry this async test up to 5 times - } else { - addContext(test, message.nativeTransaction_insufficientBalance); - console.warn(message.nativeTransaction_insufficientBalance); - test.skip(); - } - }); -}); diff --git a/test/utils/sharedData_mainnet.js b/test/utils/sharedData_mainnet.js new file mode 100644 index 0000000..a8584fc --- /dev/null +++ b/test/utils/sharedData_mainnet.js @@ -0,0 +1,165 @@ +// chain configurations +const chainConfigs = { + 100: { + name: 'gnosis', + invalidId: '110 ', + incorrectId: '111000', + toChainId: '137', + toTokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174', + toTokenName: 'USDC', + incorrectToTokenAddress: '0x2791Bca1f2de4661ED88A30C99A719449Aa84174', + invalidToTokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa8417', + tokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83', + tokenName: 'USDC', + incorrectTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc19B60fb7A83', + invalidTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A8', + tokenAddressUsdt: '0x4ECaBa5870353805a9F068101A40E0f32ed605C6', + tokenNameUsdt: 'USDT', + incorrectTokenAddressUsdt: '0xAECDBa5770353855a9F068104A40E0f32e2605C6', + invalidTokenAddressUsdt: '0x4ECaBa5870353805a9F068101A40E0f32ed605C', + providerNetwork: 'https://rpc.etherspot.io/gnosis', + invalidProviderNetwork: 'http://rpc.etherspot.io/gnosis', + otherProviderNetwork: 'https://rpc.etherspot.io/polygon', + }, + 137: { + name: 'polygon', + invalidId: '110 ', + incorrectId: '1730', + toChainId: '100', + toTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83', + toTokenName: 'USDC', + incorrectToTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc19B60fb7A83', + invalidToTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A8', + tokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174', + tokenName: 'USDC', + incorrectTokenAddress: '0x2791Bca1f2de4661ED88A30C99A719449Aa84174', + invalidTokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa8417', + tokenAddressUsdt: '0xc2132D05D31c914a87C6611C10748AEb04B58e8F', + tokenNameUsdt: 'USDT', + incorrectTokenAddressUsdt: '0xAECDBa5770353855a9F068104A40E0f32e2605C6', + invalidTokenAddressUsdt: '0xc2132D05D31c914a87C6611C10748AEb04B58e8', + providerNetwork: 'https://polygon-bundler.etherspot.io', + invalidProviderNetwork: 'http://polygon-bundler.etherspot.io', + otherProviderNetwork: 'https://arbitrum-bundler.etherspot.io', + }, +}; + +// convert configurations to arrays for backward compatibility +const chainId = Object.keys(chainConfigs).map(Number); +const chainName = chainId.map((id) => chainConfigs[id].name); +const invalid_chainId = chainId.map((id) => Number(chainConfigs[id].invalidId)); +const incorrect_chainId = chainId.map((id) => + Number(chainConfigs[id].incorrectId) +); +const toChainId = 
chainId.map((id) => chainConfigs[id].toChainId); +const toTokenAddress = chainId.map((id) => chainConfigs[id].toTokenAddress); +const toTokenName = chainId.map((id) => chainConfigs[id].toTokenName); +const invalid_toTokenAddress = chainId.map( + (id) => chainConfigs[id].invalidToTokenAddress +); +const incorrect_toTokenAddress = chainId.map( + (id) => chainConfigs[id].incorrectToTokenAddress +); + +const tokenAddress = chainId.map((id) => chainConfigs[id].tokenAddress); +const tokenName = chainId.map((id) => chainConfigs[id].tokenName); +const invalid_tokenAddress = chainId.map( + (id) => chainConfigs[id].invalidTokenAddress +); +const incorrect_tokenAddress = chainId.map( + (id) => chainConfigs[id].incorrectTokenAddress +); +const tokenAddressUsdt = chainId.map((id) => chainConfigs[id].tokenAddressUsdt); +const tokenNameUsdt = chainId.map((id) => chainConfigs[id].tokenNameUsdt); +const invalid_tokenAddressUsdt = chainId.map( + (id) => chainConfigs[id].invalidTokenAddressUsdt +); +const incorrect_tokenAddressUsdt = chainId.map( + (id) => chainConfigs[id].incorrectTokenAddressUsdt +); +const providerNetwork = chainId.map((id) => chainConfigs[id].providerNetwork); +const invalidProviderNetwork = chainId.map( + (id) => chainConfigs[id].invalidProviderNetwork +); +const otherProviderNetwork = chainId.map( + (id) => chainConfigs[id].otherProviderNetwork +); + +// Get chain ID from CLI or random selection +const getSelectedChain = () => { + // Check for CLI parameter + const args = process.argv.slice(2); + const chainIdArg = args.find((arg) => arg.startsWith('--chainId=')); + + if (chainIdArg) { + const selectedChainId = Number(chainIdArg.split('=')[1]); + + // Validate the provided chain ID + if (!chainConfigs[selectedChainId]) { + throw new Error( + `Invalid chainId: ${selectedChainId}. 
Supported chains: ${chainId.join(', ')}` + ); + } + + return { + chainId: selectedChainId, + index: chainId.indexOf(selectedChainId), + }; + } + + // Random selection if no chainId specified + const randomIndex = Math.floor(Math.random() * chainId.length); + return { + chainId: chainId[randomIndex], + index: randomIndex, + }; +}; + +// Get the selected or random chain +const { chainId: selectedChainId, index: selectedIndex } = getSelectedChain(); + +// Export the selected values +const randomChainId = selectedChainId; +const randomChainName = chainName[selectedIndex]; +const randomInvalidChainId = invalid_chainId[selectedIndex]; +const randomIncorrectChainId = incorrect_chainId[selectedIndex]; +const randomToChainId = toChainId[selectedIndex]; +const randomToTokenAddress = toTokenAddress[selectedIndex]; +const randomToTokenName = toTokenName[selectedIndex]; +const randomInvalidToTokenAddress = invalid_toTokenAddress[selectedIndex]; +const randomIncorrectToTokenAddress = incorrect_toTokenAddress[selectedIndex]; +const randomTokenAddress = tokenAddress[selectedIndex]; +const randomTokenName = tokenName[selectedIndex]; +const randomInvalidTokenAddress = invalid_tokenAddress[selectedIndex]; +const randomIncorrectTokenAddress = incorrect_tokenAddress[selectedIndex]; +const randomTokenAddressUsdt = tokenAddressUsdt[selectedIndex]; +const randomTokenNameUsdt = tokenNameUsdt[selectedIndex]; +const randomInvalidTokenAddressUsdt = invalid_tokenAddressUsdt[selectedIndex]; +const randomIncorrectTokenAddressUsdt = + incorrect_tokenAddressUsdt[selectedIndex]; +const randomProviderNetwork = providerNetwork[selectedIndex]; +const randomInvalidProviderNetwork = invalidProviderNetwork[selectedIndex]; +const randomOtherProviderNetwork = otherProviderNetwork[selectedIndex]; + +export { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, + randomToChainId, + randomToTokenAddress, + randomToTokenName, + randomInvalidToTokenAddress, + randomIncorrectToTokenAddress, + randomTokenAddress, + randomTokenName, + randomInvalidTokenAddress, + randomIncorrectTokenAddress, + randomTokenAddressUsdt, + randomTokenNameUsdt, + randomIncorrectTokenAddressUsdt, + randomInvalidTokenAddressUsdt, + randomProviderNetwork, + randomInvalidProviderNetwork, + randomOtherProviderNetwork, +}; diff --git a/test/utils/sharedData_testnet.js b/test/utils/sharedData_testnet.js new file mode 100644 index 0000000..3fd9e28 --- /dev/null +++ b/test/utils/sharedData_testnet.js @@ -0,0 +1,144 @@ +// chain configurations +const chainConfigs = { + 11155111: { + name: 'sepolia', + invalidId: '11155122', + incorrectId: '84555', + toChainId: '80002', + toTokenAddress: '0x0Fd9e8d3aF1aaee056EB9e802c3A762a667b1904', + toTokenName: 'USDC', + incorrectToTokenAddress: '0x0Ad9e1d3aF1acee056EB9e502c3A765a667b1905', + invalidToTokenAddress: '0x0Fd9e8d3aF1aaee056EB9e802c3A762a667b190', + tokenAddress: '0x1c7D4B196Cb0C7B01d743Fbc6116a902379C7238', + tokenName: 'USDC', + incorrectTokenAddress: '0x1a7D4A196Cb0C2B01d748Fbc6116a302379C7233', + invalidTokenAddress: '0x1c7D4B196Cb0C7B01d743Fbc6116a902379C723', + tokenAddressUsdt: '0xe90a57A45F1Eae578F5aec8eed5bA8Fc6F55eF65', + tokenNameUsdt: 'USDT', + incorrectTokenAddressUsdt: '0xe10b57A45A1Eae178F5aec8aed5bA8Fc6F55eF15', + invalidTokenAddressUsdt: '0xe90a57A45F1Eae578F5aec8eed5bA8Fc6F55eF6', + providerNetwork: 'https://testnet-rpc.etherspot.io/v1/11155111', + invalidProviderNetwork: 'http://testnet-rpc.etherspot.io/v1/11155111', + otherProviderNetwork: 
'https://testnet-rpc.etherspot.io/v1/80002', + }, +}; + +// convert configurations to arrays for backward compatibility +const chainId = Object.keys(chainConfigs).map(Number); +const chainName = chainId.map((id) => chainConfigs[id].name); +const invalid_chainId = chainId.map((id) => Number(chainConfigs[id].invalidId)); +const incorrect_chainId = chainId.map((id) => + Number(chainConfigs[id].incorrectId) +); +const toChainId = chainId.map((id) => chainConfigs[id].toChainId); +const toTokenAddress = chainId.map((id) => chainConfigs[id].toTokenAddress); +const toTokenName = chainId.map((id) => chainConfigs[id].toTokenName); +const invalid_toTokenAddress = chainId.map( + (id) => chainConfigs[id].invalidToTokenAddress +); +const incorrect_toTokenAddress = chainId.map( + (id) => chainConfigs[id].incorrectToTokenAddress +); + +const tokenAddress = chainId.map((id) => chainConfigs[id].tokenAddress); +const tokenName = chainId.map((id) => chainConfigs[id].tokenName); +const invalid_tokenAddress = chainId.map( + (id) => chainConfigs[id].invalidTokenAddress +); +const incorrect_tokenAddress = chainId.map( + (id) => chainConfigs[id].incorrectTokenAddress +); +const tokenAddressUsdt = chainId.map((id) => chainConfigs[id].tokenAddressUsdt); +const tokenNameUsdt = chainId.map((id) => chainConfigs[id].tokenNameUsdt); +const invalid_tokenAddressUsdt = chainId.map( + (id) => chainConfigs[id].invalidTokenAddressUsdt +); +const incorrect_tokenAddressUsdt = chainId.map( + (id) => chainConfigs[id].incorrectTokenAddressUsdt +); +const providerNetwork = chainId.map((id) => chainConfigs[id].providerNetwork); +const invalidProviderNetwork = chainId.map( + (id) => chainConfigs[id].invalidProviderNetwork +); +const otherProviderNetwork = chainId.map( + (id) => chainConfigs[id].otherProviderNetwork +); + +// Get chain ID from CLI or random selection +const getSelectedChain = () => { + // Check for CLI parameter + const args = process.argv.slice(2); + const chainIdArg = args.find((arg) => arg.startsWith('--chainId=')); + + if (chainIdArg) { + const selectedChainId = Number(chainIdArg.split('=')[1]); + + // Validate the provided chain ID + if (!chainConfigs[selectedChainId]) { + throw new Error( + `Invalid chainId: ${selectedChainId}. 
Supported chains: ${chainId.join(', ')}` + ); + } + + return { + chainId: selectedChainId, + index: chainId.indexOf(selectedChainId), + }; + } + + // Random selection if no chainId specified + const randomIndex = Math.floor(Math.random() * chainId.length); + return { + chainId: chainId[randomIndex], + index: randomIndex, + }; +}; + +// Get the selected or random chain +const { chainId: selectedChainId, index: selectedIndex } = getSelectedChain(); + +// Export the selected values +const randomChainId = selectedChainId; +const randomChainName = chainName[selectedIndex]; +const randomInvalidChainId = invalid_chainId[selectedIndex]; +const randomIncorrectChainId = incorrect_chainId[selectedIndex]; +const randomToChainId = toChainId[selectedIndex]; +const randomToTokenAddress = toTokenAddress[selectedIndex]; +const randomToTokenName = toTokenName[selectedIndex]; +const randomInvalidToTokenAddress = invalid_toTokenAddress[selectedIndex]; +const randomIncorrectToTokenAddress = incorrect_toTokenAddress[selectedIndex]; +const randomTokenAddress = tokenAddress[selectedIndex]; +const randomTokenName = tokenName[selectedIndex]; +const randomInvalidTokenAddress = invalid_tokenAddress[selectedIndex]; +const randomIncorrectTokenAddress = incorrect_tokenAddress[selectedIndex]; +const randomTokenAddressUsdt = tokenAddressUsdt[selectedIndex]; +const randomTokenNameUsdt = tokenNameUsdt[selectedIndex]; +const randomInvalidTokenAddressUsdt = invalid_tokenAddressUsdt[selectedIndex]; +const randomIncorrectTokenAddressUsdt = + incorrect_tokenAddressUsdt[selectedIndex]; +const randomProviderNetwork = providerNetwork[selectedIndex]; +const randomInvalidProviderNetwork = invalidProviderNetwork[selectedIndex]; +const randomOtherProviderNetwork = otherProviderNetwork[selectedIndex]; + +export { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, + randomToChainId, + randomToTokenAddress, + randomToTokenName, + randomInvalidToTokenAddress, + randomIncorrectToTokenAddress, + randomTokenAddress, + randomTokenName, + randomInvalidTokenAddress, + randomIncorrectTokenAddress, + randomTokenAddressUsdt, + randomTokenNameUsdt, + randomIncorrectTokenAddressUsdt, + randomInvalidTokenAddressUsdt, + randomProviderNetwork, + randomInvalidProviderNetwork, + randomOtherProviderNetwork, +}; From 3b08c65e4b5eaa765e6c823a9161109b51685e83 Mon Sep 17 00:00:00 2001 From: Jineshdarjee Date: Wed, 8 Jan 2025 19:12:33 +0530 Subject: [PATCH 2/3] Rearrange and modified paymasterapi test cases --- package.json | 31 +- test/data/apiTestData.json | 3 + test/data/constant.json | 25 +- test/data/messages.json | 834 +++++- test/data/testData.json | 63 +- .../loadAndPerformance/arka/goerli.spec.js | 353 --- .../arka/loadTesting.spec.js | 12 - .../loadAndPerformance/arka/mumbai.spec.js | 353 --- .../loadAndPerformance/skandha/goerli.spec.js | 361 --- .../skandha/loadTesting.spec.js | 12 - .../loadAndPerformance/skandha/mumbai.spec.js | 361 --- .../paymasterAPIs/arka/checkWhitelist.spec.js | 790 ++++++ .../paymasterAPIs/arka/deposit.spec.js | 648 +++++ .../arka/getAllWhitelist.spec.js | 296 +++ .../paymasterAPIs/arka/metadata.spec.js | 264 ++ .../paymasterAPIs/arka/pimlicoAddress.spec.js | 479 ++++ .../arka/removeWhitelist.spec.js | 798 ++++++ .../paymasterAPIs/arka/whitelist.spec.js | 671 +++++ .../eth_estimateUserOperationGas.spec.js | 2261 +++++++++++++++++ .../eth_getUserOperationByHash.spec.js | 507 ++++ .../eth_getUserOperationReceipt.spec.js | 600 +++++ .../skandha/eth_sendUserOperation.spec.js | 2195 
++++++++++++++++ .../skandha/skandha_config.spec.js | 199 ++ .../skandha/skandha_feeHistory.spec.js | 301 +++ .../skandha/skandha_getGasPrice.spec.js | 180 ++ .../sponsorshipPolicy/addPolicy.spec.js | 878 +++++++ .../sponsorshipPolicy/deletePolicy.spec.js | 418 +++ .../sponsorshipPolicy/disablePolicy.spec.js | 457 ++++ .../sponsorshipPolicy/enablePolicy.spec.js | 457 ++++ .../getLatestPolicyWalletAddress.spec.js | 288 +++ ...etLatestPolicyWalletAddressChainid.spec.js | 413 +++ ...LatestPolicyWalletAddressEPVersion.spec.js | 408 +++ ...olicyWalletAddressEPVersionChainid.spec.js | 570 +++++ .../sponsorshipPolicy/getPolicy.spec.js | 185 ++ .../sponsorshipPolicy/getPolicyId.spec.js | 448 ++++ .../getPolicyWalletAddress.spec.js | 286 +++ .../getPolicyWalletAddressEPVersion.spec.js | 406 +++ ...olicyWalletAddressEPVersionChainid.spec.js | 516 ++++ .../sponsorshipPolicy/updatePolicy.spec.js | 1786 +++++++++++++ .../erc20Transaction/mumbai.spec.js | 150 -- .../erc20Transaction/sepolia.spec.js | 150 -- .../paymasterAPIs/arka/checkWhitelist.spec.js | 790 ++++++ .../paymasterAPIs/arka/deposit.spec.js | 648 +++++ .../arka/getAllWhitelist.spec.js | 296 +++ .../paymasterAPIs/arka/metadata.spec.js | 264 ++ .../paymasterAPIs/arka/pimlicoAddress.spec.js | 479 ++++ .../arka/removeWhitelist.spec.js | 798 ++++++ .../paymasterAPIs/arka/whitelist.spec.js | 671 +++++ .../eth_estimateUserOperationGas.spec.js | 2261 +++++++++++++++++ .../eth_getUserOperationByHash.spec.js | 507 ++++ .../eth_getUserOperationReceipt.spec.js | 600 +++++ .../skandha/eth_sendUserOperation.spec.js | 2195 ++++++++++++++++ .../skandha/skandha_config.spec.js | 199 ++ .../skandha/skandha_feeHistory.spec.js | 301 +++ .../skandha/skandha_getGasPrice.spec.js | 180 ++ .../sponsorshipPolicy/addPolicy.spec.js | 878 +++++++ .../sponsorshipPolicy/deletePolicy.spec.js | 418 +++ .../sponsorshipPolicy/disablePolicy.spec.js | 457 ++++ .../sponsorshipPolicy/enablePolicy.spec.js | 457 ++++ .../getLatestPolicyWalletAddress.spec.js | 288 +++ ...etLatestPolicyWalletAddressChainid.spec.js | 413 +++ ...LatestPolicyWalletAddressEPVersion.spec.js | 408 +++ ...olicyWalletAddressEPVersionChainid.spec.js | 570 +++++ .../sponsorshipPolicy/getPolicy.spec.js | 185 ++ .../sponsorshipPolicy/getPolicyId.spec.js | 448 ++++ .../getPolicyWalletAddress.spec.js | 286 +++ .../getPolicyWalletAddressEPVersion.spec.js | 406 +++ ...olicyWalletAddressEPVersionChainid.spec.js | 516 ++++ .../sponsorshipPolicy/updatePolicy.spec.js | 1786 +++++++++++++ test/utils/baseTest.js | 62 +- test/utils/sharedData_mainnet.js | 42 +- 71 files changed, 36414 insertions(+), 1808 deletions(-) delete mode 100644 test/specs/loadAndPerformance/arka/goerli.spec.js delete mode 100644 test/specs/loadAndPerformance/arka/loadTesting.spec.js delete mode 100644 test/specs/loadAndPerformance/arka/mumbai.spec.js delete mode 100644 test/specs/loadAndPerformance/skandha/goerli.spec.js delete mode 100644 test/specs/loadAndPerformance/skandha/loadTesting.spec.js delete mode 100644 test/specs/loadAndPerformance/skandha/mumbai.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js create mode 100644 
test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/skandha/skandha_config.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js delete mode 100644 test/specs/skandhaBenchmark/erc20Transaction/mumbai.spec.js delete mode 100644 test/specs/skandhaBenchmark/erc20Transaction/sepolia.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/deposit.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/metadata.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/skandha/skandha_config.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js create mode 
100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js diff --git a/package.json b/package.json index 1dc1811..3090250 100644 --- a/package.json +++ b/package.json @@ -5,43 +5,20 @@ "main": "index.js", "type": "module", "scripts": { - "test": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transferringFunds/sepolia.spec.js --reporter mochawesome --require mochawesome/register --reporter-options charts=true,code=false,showHooks=always", + "test": "mocha --timeout 600000 --spec test/specs/paymasterAPIs/skandha/skandha_feeHistory.spec.js --reporter mochawesome --require mochawesome/register --reporter-options charts=true,code=false,showHooks=always", "test-mainnet": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/*/oldWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-mainnet-precondition": "mocha --timeout 600000 --spec test/specs/mainnet/precondition/*_newWallet.spec.js", "test-mainnet-postcondition": "mocha --timeout 600000 --spec test/specs/mainnet/postcondition/*_newWallet.spec.js", "test-mainnet-newWallet": "npm run test-mainnet-precondition; mocha --timeout 600000 --spec test/specs/mainnet/*/newWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-mainnet-postcondition", "test-mainnet-combined": "npm run test-mainnet-precondition; mocha --timeout 600000 --spec test/specs/mainnet/*/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-mainnet-postcondition", - "test-mainnet-transfertoken": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transferringFunds/*transferringFunds.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-transactionhistory": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/transactionHistory/*transactionHistory.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-swap": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec 
test/specs/mainnet/swap/*swap.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-paymaster": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/paymaster/*paymaster.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-listandrates": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/listAndRates/*listAndRates.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-getaddresses": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/getAddresses/*getAddresses.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-mainnet-connext": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/connext/*connext.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-mainnet-paymasterapi": "mocha --timeout 600000 --spec test/specs/mainnet/paymasterAPIs/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-testnet": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/*/oldWallet_.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-testnet-precondition": "mocha --timeout 600000 --spec test/specs/testnet/precondition/*_newWallet.spec.js", "test-testnet-postcondition": "mocha --timeout 600000 --spec test/specs/testnet/postcondition/*_newWallet.spec.js", "test-testnet-newWallet": "npm run test-testnet-precondition; mocha --timeout 600000 --spec test/specs/testnet/*/newWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-testnet-postcondition", "test-testnet-combined": "npm run test-testnet-precondition; mocha --timeout 600000 --spec test/specs/testnet/*/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always; npm run test-testnet-postcondition", - "test-testnet-transfertoken": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transferringFunds/*transferringFunds.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-transactionhistory": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/transactionHistory/*transactionHistory.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-swap": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/swap/*swap.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-paymaster": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/paymaster/*paymaster.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-listandrates": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/listAndRates/*listAndRates.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-getaddresses": "mocha --timeout 600000 --spec 
test/specs/testnet/version/version.spec.js --spec test/specs/testnet/getAddresses/*getAddresses.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-testnet-connext": "mocha --timeout 600000 --spec test/specs/testnet/version/version.spec.js --spec test/specs/testnet/connext/*connext.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-arka-mumbai": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/arka/mumbai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-arka-goerli": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/arka/goerli.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-arka": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/arka/mumbai.spec.js --spec test/specs/loadAndPerformance/arka/goerli.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-skandha-mumbai": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/skandha/mumbai.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-skandha-goerli": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/skandha/goerli.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-skandha": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/skandha/mumbai.spec.js --spec test/specs/loadAndPerformance/skandha/goerli.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-performance": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/*/mumbai.spec.js --spec test/specs/loadAndPerformance/*/goerli.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", - "test-load-arka": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/arka/loadTesting.spec.js", - "test-load-skandha": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/skandha/loadTesting.spec.js", - "test-load": "mocha --timeout 600000 --spec test/specs/loadAndPerformance/*/loadTesting.spec.js", - "test-benchmark": "mocha --timeout 600000 --parallel --spec test/specs/skandhaBenchmark/mumbai/*.spec.js --spec test/specs/skandhaBenchmark/sepolia/*.spec.js --reporter mochawesome --require mochawesome/register --reporter-options charts=true,code=false,showHooks=always", - "test-service-benchmark": "mocha --timeout 600000 --spec test/specs/skandhaBenchmark/erc20Transaction/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always" + "test-testnet-paymasterapi": "mocha --timeout 600000 --spec test/specs/testnet/paymasterAPIs/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", + "test-service-benchmark": "mocha --timeout 600000 --spec test/specs/hostedServiceBanchmark/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always" }, "author": "", "license": "ISC", diff --git a/test/data/apiTestData.json b/test/data/apiTestData.json index 9dde1fd..f668c6c 100644 --- a/test/data/apiTestData.json +++ b/test/data/apiTestData.json @@ -11,8 +11,11 @@ "mumbai_chainid": 80001, "goerli_chainid": 5, "arka_deposit": "https://arka.etherspot.io/deposit", + "arka_deposit_v2": "https://arka.etherspot.io/deposit/v2", "arka_whitelist": "https://arka.etherspot.io/whitelist", + "arka_whitelist_v2": 
"https://arka.etherspot.io/whitelist/v2", "arka_checkwhitelist": "https://arka.etherspot.io/checkWhitelist", + "arka_checkwhitelist_v2": "https://arka.etherspot.io/checkWhitelist/v2", "arka_pimlico": "https://arka.etherspot.io/pimlicoAddress", "address": "0x684E10D67dc4159B813Db70a1DAeB5FcC98bd034", "entryPointAddress": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789", diff --git a/test/data/constant.json b/test/data/constant.json index 8ab9d86..b96d0ef 100644 --- a/test/data/constant.json +++ b/test/data/constant.json @@ -30,7 +30,10 @@ "add_whitelist_3": "Successfully whitelisted with transaction Hash", "check_whitelist_1": "Already added", "check_whitelist_2": "Not added yet", + "getAllWhitelist_1": "No whitelist were found on the given apiKey/policyId", "deposit_1": "Successfully deposited", + "deposit_2": "Successfully deposited with transaction Hash", + "deposit_3": "Balance is less than the amount to be deposited", "invalid_network_1": "Invalid network/token", "invalid_network_2": "could not detect network", @@ -55,5 +58,25 @@ "invalid_apiKey": "Invalid Api Key", "invalid_data": "Invalid data provided", - "unsupported_chainid": "Unsupported chain id, visit https://etherspot.fyi for more information" + "unsupported_chainid": "Unsupported chain id, visit https://etherspot.fyi for more information", + + "sponsorshipPolicy_walletAddress_1": "Wallet address does not match for the Api Key", + "sponsorshipPolicy_walletAddress_2": "Invalid sponsorship policy data", + "sponsorshipPolicy_walletAddress_3": "Name and description are required fields", + "sponsorshipPolicy_walletAddress_4": "Supported entry point versions are required and must be valid. You entered: none. Valid values are: EPV_06, EPV_07", + "sponsorshipPolicy_walletAddress_5": "Supported entry point versions are required and must be valid. You entered: EPV_06, EP_07. 
Valid values are: EPV_06, EPV_07", + "sponsorshipPolicy_walletAddress_6": "Sponsorship policy not found", + "sponsorshipPolicy_walletAddress_7": "Invalid sponsorship policy id", + "sponsorshipPolicy_walletAddress_8": "Route PUT:/enable-policy not found", + "sponsorshipPolicy_walletAddress_9": "Not Found", + "sponsorshipPolicy_walletAddress_10": "Failed to update sponsorship policy", + "sponsorshipPolicy_walletAddress_11": "Unsupported EP version", + "sponsorshipPolicy_walletAddress_12": "Invalid URL", + "sponsorshipPolicy_walletAddress_13": "Api Key is required in header", + "sponsorshipPolicy_walletAddress_14": "Cannot disable a policy which is already disabled", + "sponsorshipPolicy_walletAddress_15": "Cannot enable a policy which is already enabled", + + "successCode_1": 200, + "skandha_error_1": "Invalid Request", + "skandha_error_2": "Unexpected behaviour" } diff --git a/test/data/messages.json b/test/data/messages.json index a2f8ea3..dab412e 100644 --- a/test/data/messages.json +++ b/test/data/messages.json @@ -178,8 +178,10 @@ "_comment8": "Paymaster - Arka Functions", "vali_metadata_sponsorAddress": "The sponsorAddress is empty in the metadata response.", "vali_metadata_sponsorWalletBalance": "The sponsorWalletBalance is empty in the metadata response.", + "vali_metadata_sponsorBalance": "The sponsorBalance is empty in the metadata response.", "vali_metadata_chainsSupported": "The chainsSupported is empty in the metadata response.", "vali_metadata_tokenPaymasters": "The tokenPaymasters is empty in the metadata response.", + "vali_metadata_multiTokenPaymasters": "The multiTokenPaymasters is empty in the metadata response.", "fail_metadata_1": "An error is displayed while calling the metadata function of arka.", "vali_getTokenPaymasterAddress_tokenPaymasterAddress": "The tokenPaymasterAddress is empty in the getTokenPaymasterAddress response.", "vali_getTokenPaymasterAddress_1": "The validation is displayed while getting token paymaster address for incorrect token.", @@ -525,5 +527,835 @@ "fail_connext_24": "The validation is not displayed while getting the transaction status with invalid transactionHash.", "fail_connext_25": "The validation is not displayed while getting the transaction status with incorrect transactionHash.", "fail_connext_26": "The validation is not displayed while getting the transaction status without transactionHash.", - "connext_insufficientBalance": "DUE TO INSUFFICIENT WALLET BALANCE, SKIPPING TEST CASE OF THE CONNEXT ENDPOINTS" + "connext_insufficientBalance": "DUE TO INSUFFICIENT WALLET BALANCE, SKIPPING TEST CASE OF THE CONNEXT ENDPOINTS", + + "vali_addPolicy_walletAddress": "The wallet address is not displayed as expected in the add policy response.", + "vali_addPolicy_name": "The name is not displayed as expected in the add policy response.", + "vali_addPolicy_description": "The description is not displayed as expected in the add policy response.", + "vali_addPolicy_id": "The description is not displayed in the add policy response.", + "vali_addPolicy_2": "The respective validation is not displayed when an invalid wallet address is added while adding the policy.", + "vali_addPolicy_3": "The respective validation is not displayed when an incorrect wallet address is added while adding the policy.", + "vali_addPolicy_4": "The respective validation is not displayed when the wallet address is not added while adding the policy.", + "vali_addPolicy_5": "The respective validation is not displayed when the wallet address is added as an empty string while adding 
the policy.", + "vali_addPolicy_6": "The respective validation is not displayed when the wallet address is added as only blank space while adding the policy.", + "vali_addPolicy_7": "The respective validation is not displayed when the name is not added while adding the policy.", + "vali_addPolicy_8": "The respective validation is not displayed when the name is added as an empty string while adding the policy.", + "vali_addPolicy_9": "The respective validation is not displayed when the name is added as only blank space while adding the policy.", + "vali_addPolicy_10": "The respective validation is not displayed when the description is not added while adding the policy.", + "vali_addPolicy_11": "The respective validation is not displayed when the description is added as an empty string while adding the policy.", + "vali_addPolicy_12": "The respective validation is not displayed when the description is added as only blank space while adding the policy.", + "vali_addPolicy_13": "The respective validation is not displayed when the EPVersion is not added while adding the policy.", + "vali_addPolicy_14": "The respective validation is not displayed when an invalid EPVersion is added while adding the policy.", + "vali_addPolicy_15": "The respective validation is not displayed when the EPVersion is added with an empty array while adding the policy.", + "vali_addPolicy_16": "The respective validation is not displayed when an invalid API key is added while adding the policy.", + "vali_addPolicy_17": "The respective validation is not displayed when an incorrect API key is added while adding the policy.", + "vali_addPolicy_18": "The respective validation is not displayed when the API key is not added while adding the policy.", + "fail_addPolicy_1": "An error message is displayed in the add policy API.", + "fail_addPolicy_2": "An error message is displayed when an invalid wallet address is added while adding the policy", + "fail_addPolicy_3": "An error message is displayed when an incorrect wallet address is added while adding the policy.", + "fail_addPolicy_4": "An error message is displayed when the wallet address is not added while adding the policy.", + "fail_addPolicy_5": "An error message is displayed when the wallet address is added as an empty string while adding the policy.", + "fail_addPolicy_6": "An error message is displayed when the wallet address is added as only blank space while adding the policy.", + "fail_addPolicy_7": "An error message is displayed when the name is not added while adding the policy.", + "fail_addPolicy_8": "An error message is displayed when the name is added as an empty string while adding the policy.", + "fail_addPolicy_9": "An error message is displayed when the name is added as only blank space while adding the policy.", + "fail_addPolicy_10": "An error message is displayed when the description is not added while adding the policy.", + "fail_addPolicy_11": "An error message is displayed when the description is added as an empty string while adding the policy.", + "fail_addPolicy_12": "An error message is displayed when the description is added as only blank space while adding the policy.", + "fail_addPolicy_13": "An error message is displayed when the EPVersion is not added while adding the policy.", + "fail_addPolicy_14": "An error message is displayed when an invalid EPVersion is added while adding the policy.", + "fail_addPolicy_15": "An error message is displayed when the EPVersion added with an empty array while adding the policy.", + "fail_addPolicy_16": "An 
error message is displayed when an invalid API key is added while adding the policy.", + "fail_addPolicy_17": "An error message is displayed when an incorrect API key is added while adding the policy.", + "fail_addPolicy_18": "An error message is displayed when API key is not added while adding the policy.", + "vali_updatePolicy_id": "The value of id is not displayed as expected in the update policy response.", + "vali_updatePolicy_walletAddress": "The value of wallet address is not displayed as expected in the update policy response.", + "vali_updatePolicy_name": "The value of name is not displayed as expected in the update policy response.", + "vali_updatePolicy_description": "The value of description is not displayed as expected in the update policy response.", + "vali_updatePolicy_isPublic": "The value of isPublic is not displayed as expected in the update policy response.", + "vali_updatePolicy_isEnabled": "The value of isEnabled is not displayed as expected in the update policy response.", + "vali_updatePolicy_isApplicableToAllNetworks": "The value of isApplicableToAllNetworks is not displayed as expected in the update policy response.", + "vali_updatePolicy_enabledChains": "The value of enabledChains is not displayed as expected in the update policy response.", + "vali_updatePolicy_supportedEPVersions": "The value of supportedEPVersions is not displayed as expected in the update policy response.", + "vali_updatePolicy_isPerpetual": "The value of isPerpetual is not displayed as expected in the update policy response.", + "vali_updatePolicy_globalMaximumApplicable": "The value of globalMaximumApplicable is not displayed as expected in the update policy response.", + "vali_updatePolicy_globalMaximumUsd": "The value of globalMaximumUsd is not displayed as expected in the update policy response.", + "vali_updatePolicy_globalMaximumNative": "The value of globalMaximumNative is not displayed as expected in the update policy response.", + "vali_updatePolicy_globalMaximumOpCount": "The value of globalMaximumOpCount is not displayed as expected in the update policy response.", + "vali_updatePolicy_perUserMaximumApplicable": "The value of perUserMaximumApplicable is not displayed as expected in the update policy response.", + "vali_updatePolicy_perUserMaximumUsd": "The value of perUserMaximumUsd is not displayed as expected in the update policy response.", + "vali_updatePolicy_perUserMaximumNative": "The value of perUserMaximumNative is not displayed as expected in the update policy response.", + "vali_updatePolicy_perUserMaximumOpCount": "The value of perUserMaximumOpCount is not displayed as expected in the update policy response.", + "vali_updatePolicy_perOpMaximumApplicable": "The value of perOpMaximumApplicable is not displayed as expected in the update policy response.", + "vali_updatePolicy_perOpMaximumUsd": "The value of perOpMaximumUsd is not displayed as expected in the update policy response.", + "vali_updatePolicy_perOpMaximumNative": "The value of perOpMaximumNative is not displayed as expected in the update policy response.", + "vali_updatePolicy_addressAllowList": "The value of addressAllowList is not displayed as expected in the update policy response.", + "vali_updatePolicy_addressBlockList": "The value of addressBlockList is not displayed as expected in the update policy response.", + "vali_updatePolicy_2": "The respective validation is not displayed when an invalid wallet address is added while updating the policy.", + "vali_updatePolicy_3": "The respective validation is not 
displayed when an incorrect wallet address is added while updating the policy.", + "vali_updatePolicy_4": "The respective validation is not displayed when the wallet address is not added while updating the policy.", + "vali_updatePolicy_5": "The respective validation is not displayed when the wallet address is added as an empty string while updating the policy.", + "vali_updatePolicy_6": "The respective validation is not displayed when the wallet address is added as only blank space while updating the policy.", + "vali_updatePolicy_7": "The respective validation is not displayed when the name is not added while updating the policy.", + "vali_updatePolicy_8": "The respective validation is not displayed when the name is added as an empty string while updating the policy.", + "vali_updatePolicy_9": "The respective validation is not displayed when the name is added as only blank space while updating the policy.", + "vali_updatePolicy_10": "The respective validation is not displayed when the description is not added while updating the policy.", + "vali_updatePolicy_11": "The respective validation is not displayed when the description is added as an empty string while updating the policy.", + "vali_updatePolicy_12": "The respective validation is not displayed when the description is added as only blank space while updating the policy.", + "vali_updatePolicy_13": "The respective validation is not displayed when the EPVersion is not added while updating the policy.", + "vali_updatePolicy_14": "The respective validation is not displayed when an invalid EPVersion is added while updating the policy.", + "vali_updatePolicy_15": "The respective validation is not displayed when the EPVersion is added with an empty array while updating the policy.", + "vali_updatePolicy_16": "The respective validation is not displayed when an invalid id is added while updating the policy.", + "vali_updatePolicy_17": "The respective validation is not displayed when an incorrect id is added while updating the policy.", + "vali_updatePolicy_18": "The respective validation is not displayed when the id is not added while updating the policy.", + "vali_updatePolicy_19": "The respective validation is not displayed when zero value id is added while updating the policy.", + "vali_updatePolicy_20": "The respective validation is not displayed when negative value id is added while updating the policy.", + "vali_updatePolicy_21": "The respective validation is not displayed when an invalid API key is added while updating the policy.", + "vali_updatePolicy_22": "The respective validation is not displayed when an incorrect API key is added while updating the policy.", + "vali_updatePolicy_23": "The respective validation is not displayed when API key is not added while updating the policy.", + "fail_updatePolicy_1": "An error message is displayed in the update policy API.", + "fail_updatePolicy_2": "An error message is displayed when an invalid wallet address is added while updating the policy", + "fail_updatePolicy_3": "An error message is displayed when an incorrect wallet address is added while updating the policy.", + "fail_updatePolicy_4": "An error message is displayed when the wallet address is not added while updating the policy.", + "fail_updatePolicy_5": "An error message is displayed when the wallet address is added as an empty string while updating the policy.", + "fail_updatePolicy_6": "An error message is displayed when the wallet address is added as only blank space while updating the policy.", + "fail_updatePolicy_7": "An 
error message is displayed when the name is not added while updating the policy.", + "fail_updatePolicy_8": "An error message is displayed when the name is added as an empty string while updating the policy.", + "fail_updatePolicy_9": "An error message is displayed when the name is added as only blank space while updating the policy.", + "fail_updatePolicy_10": "An error message is displayed when the description is not added while updating the policy.", + "fail_updatePolicy_11": "An error message is displayed when the description is added as an empty string while updating the policy.", + "fail_updatePolicy_12": "An error message is displayed when the description is added as only blank space while updating the policy.", + "fail_updatePolicy_13": "An error message is displayed when the EPVersion is not added while updating the policy.", + "fail_updatePolicy_14": "An error message is displayed when an invalid EPVersion is added while updating the policy.", + "fail_updatePolicy_15": "An error message is displayed when the EPVersion is added with an empty array while updating the policy.", + "fail_updatePolicy_16": "An error message is displayed when an invalid id is added while updating the policy", + "fail_updatePolicy_17": "An error message is displayed when an incorrect id is added while updating the policy.", + "fail_updatePolicy_18": "An error message is displayed when the id is not added while updating the policy.", + "fail_updatePolicy_19": "An error message is displayed when zero value id is added while updating the policy.", + "fail_updatePolicy_20": "An error message is displayed when negative value id is added while updating the policy.", + "fail_updatePolicy_21": "An error message is displayed when an invalid API key is added while updating the policy", + "fail_updatePolicy_22": "An error message is displayed when an incorrect API key is added while updating the policy.", + "fail_updatePolicy_23": "An error message is displayed when API key is not added while updating the policy.", + "vali_deletePolicy_message": "The respective message is not displayed as expected in the delete policy response.", + "vali_deletePolicy_1": "The respective validation is displayed when the id is not added while deleting the policy.", + "vali_deletePolicy_2": "The respective validation is not displayed when an invalid id is added while deleting the policy.", + "vali_deletePolicy_3": "The respective validation is not displayed when an incorrect id is added while deleting the policy.", + "vali_deletePolicy_4": "The respective validation is not displayed when the id is not added while deleting the policy.", + "vali_deletePolicy_5": "The respective validation is not displayed when zero value id is added while deleting the policy.", + "vali_deletePolicy_6": "The respective validation is not displayed when negative value id is added while deleting the policy.", + "vali_deletePolicy_7": "The respective validation is not displayed when an invalid API key is added while deleting the policy.", + "vali_deletePolicy_8": "The respective validation is not displayed when an incorrect API key is added while deleting the policy.", + "vali_deletePolicy_9": "The respective validation is not displayed when API key is not added while deleting the policy.", + "fail_deletePolicy_1": "An error message is displayed in the delete policy API.", + "fail_deletePolicy_2": "An error message is displayed when an invalid id is added while deleting the policy", + "fail_deletePolicy_3": "An error message is displayed when an incorrect id 
is added while deleting the policy.", + "fail_deletePolicy_4": "An error message is displayed when the id is not added while deleting the policy.", + "fail_deletePolicy_5": "An error message is displayed when zero value id is added while deleting the policy.", + "fail_deletePolicy_6": "An error message is displayed when negative value id is added while deleting the policy.", + "fail_deletePolicy_7": "An error message is displayed when an invalid API key is added while deleting the policy", + "fail_deletePolicy_8": "An error message is displayed when an incorrect API key is added while deleting the policy.", + "fail_deletePolicy_9": "An error message is displayed when API key is not added while deleting the policy.", + "vali_enablePolicy_message": "The respective message is not displayed as expected in the enable policy response.", + "vali_enablePolicy_2": "The respective validation is not displayed when an invalid id is added while enabling the policy.", + "vali_enablePolicy_3": "The respective validation is not displayed when an incorrect id is added while enabling the policy.", + "vali_enablePolicy_4": "The respective validation is not displayed when the id is not added while enabling the policy.", + "vali_enablePolicy_5": "The respective validation is not displayed when zero value id is added while enabling the policy.", + "vali_enablePolicy_6": "The respective validation is not displayed when negative value id is added while enabling the policy.", + "vali_enablePolicy_7": "The respective validation is not displayed when an invalid API key is added while enabling the policy.", + "vali_enablePolicy_8": "The respective validation is not displayed when an incorrect API key is added while enabling the policy.", + "vali_enablePolicy_9": "The respective validation is not displayed when API key is not added while enabling the policy.", + "vali_enablePolicy_10": "The respective validation is not displayed when policy is already enabled.", + "fail_enablePolicy_1": "An error message is displayed in the enable policy API.", + "fail_enablePolicy_2": "An error message is displayed when an invalid id is added while enabling the policy", + "fail_enablePolicy_3": "An error message is displayed when an incorrect id is added while enabling the policy.", + "fail_enablePolicy_4": "An error message is displayed when the id is not added while enabling the policy.", + "fail_enablePolicy_5": "An error message is displayed when zero value id is added while enabling the policy.", + "fail_enablePolicy_6": "An error message is displayed when negative value id is added while enabling the policy.", + "fail_enablePolicy_7": "An error message is displayed when an invalid API key is added while enabling the policy", + "fail_enablePolicy_8": "An error message is displayed when an incorrect API key is added while enabling the policy.", + "fail_enablePolicy_9": "An error message is displayed when API key is not added while enabling the policy.", + "fail_enablePolicy_10": "An error message is displayed when enabling already enabled policy.", + "vali_disablePolicy_message": "The respective message is not displayed as expected in the disable policy response.", + "vali_disablePolicy_2": "The respective validation is not displayed when an invalid id is added while disabling the policy.", + "vali_disablePolicy_3": "The respective validation is not displayed when an incorrect id is added while disabling the policy.", + "vali_disablePolicy_4": "The respective validation is not displayed when the id is not added while disabling 
the policy.", + "vali_disablePolicy_5": "The respective validation is not displayed when zero value id is added while disabling the policy.", + "vali_disablePolicy_6": "The respective validation is not displayed when negative value id is added while disabling the policy.", + "vali_disablePolicy_7": "The respective validation is not displayed when an invalid API key is added while disabling the policy.", + "vali_disablePolicy_8": "The respective validation is not displayed when an incorrect API key is added while disabling the policy.", + "vali_disablePolicy_9": "The respective validation is not displayed when API key is not added while disabling the policy.", + "vali_disablePolicy_10": "The respective validation is not displayed when policy is already disabled.", + "fail_disablePolicy_1": "An error message is displayed in the disable policy API.", + "fail_disablePolicy_2": "An error message is displayed when an invalid id is added while disabling the policy", + "fail_disablePolicy_3": "An error message is displayed when an incorrect id is added while disabling the policy.", + "fail_disablePolicy_4": "An error message is displayed when the id is not added while disabling the policy.", + "fail_disablePolicy_5": "An error message is displayed when zero value id is added while disabling the policy.", + "fail_disablePolicy_6": "An error message is displayed when negative value id is added while disabling the policy.", + "fail_disablePolicy_7": "An error message is displayed when an invalid API key is added while disabling the policy", + "fail_disablePolicy_8": "An error message is displayed when an incorrect API key is added while disabling the policy.", + "fail_disablePolicy_9": "An error message is displayed when API key is not added while disabling the policy.", + "fail_disablePolicy_10": "An error message is displayed when disabling already disabled policy.", + "vali_policy_walletAddress": "The wallet address is not displayed as expected in the policy response.", + "vali_policy_name": "The name is not displayed as expected in the policy response.", + "vali_policy_description": "The description is not displayed as expected in the policy response.", + "vali_policy_id": "The description is not displayed as expected in the policy response.", + "vali_policy_2": "The respective validation is not displayed when an invalid API key is added while fetching the policy.", + "vali_policy_3": "The respective validation is not displayed when an incorrect API key is added while fetching the policy.", + "vali_policy_4": "The respective validation is not displayed when API key is not added while fetching the policy.", + "fail_policy_1": "An error message is displayed in the policy API.", + "fail_policy_2": "An error message is displayed when an invalid API key is added while fetching the policy", + "fail_policy_3": "An error message is displayed when an incorrect API key is added while fetching the policy.", + "fail_policy_4": "An error message is displayed when API key is not added while fetching the policy.", + "vali_policyId_walletAddress": "The wallet address is not displayed as expected in the policy of a particular id response.", + "vali_policyId_name": "The name is not displayed as expected in the policy of a particular id response.", + "vali_policyId_description": "The description is not displayed as expected in the policy of a particular id response.", + "vali_policyId_id": "The description is not displayed as expected in the policy of a particular id response.", + "vali_policyId_2": "The respective 
validation is not displayed when an invalid id is added while fetching the policy of a particular id.", + "vali_policyId_3": "The respective validation is not displayed when an incorrect id is added while fetching the policy of a particular id.", + "vali_policyId_4": "The respective validation is not displayed when the id is not added while fetching the policy of a particular id.", + "vali_policyId_5": "The respective validation is not displayed when zero value id is added while fetching the policy of a particular id.", + "vali_policyId_6": "The respective validation is not displayed when negative value id is added while fetching the policy of a particular id.", + "vali_policyId_7": "The respective validation is not displayed when an invalid API key is added while fetching the policy of a particular id.", + "vali_policyId_8": "The respective validation is not displayed when an incorrect API key is added while fetching the policy of a particular id.", + "vali_policyId_9": "The respective validation is not displayed when API key is not added while fetching the policy of a particular id.", + "fail_policyId_1": "An error message is displayed in the policy of a particular id API.", + "fail_policyId_2": "An error message is displayed when an invalid id is added while fetching the policy of a particular id.", + "fail_policyId_3": "An error message is displayed when an incorrect id is added while fetching the policy of a particular id.", + "fail_policyId_4": "An error message is displayed when the id is not added while fetching the policy of a particular id.", + "fail_policyId_5": "An error message is displayed when zero value id is added while fetching the policy of a particular id.", + "fail_policyId_6": "An error message is displayed when negative value id is added while fetching the policy of a particular id.", + "fail_policyId_7": "An error message is displayed when an invalid API key is added while fetching the policy of a particular id.", + "fail_policyId_8": "An error message is displayed when an incorrect API key is added while fetching the policy of a particular id.", + "fail_policyId_9": "An error message is displayed when API key is not added while fetching the policy of a particular id.", + "vali_policyWalletAddress_walletAddress": "The wallet address is not displayed as expected in the policy of a particular wallet address's response.", + "vali_policyWalletAddress_name": "The name is not displayed as expected in the policy of a particular wallet address's response.", + "vali_policyWalletAddress_description": "The description is not displayed as expected in the policy of a particular wallet address's response.", + "vali_policyWalletAddress_id": "The id is not displayed as expected in the policy of a particular wallet address's response.", + "vali_policyWalletAddress_2": "The respective validation is not displayed when an invalid wallet address is added while fetching the policy of a particular wallet address.", + "vali_policyWalletAddress_3": "The respective validation is not displayed when an incorrect wallet address is added while fetching the policy of a particular wallet address.", + "vali_policyWalletAddress_4": "The respective validation is not displayed when the wallet address is not added while fetching the policy of a particular wallet address.", + "vali_policyWalletAddress_5": "The respective validation is not displayed when an invalid API key is added while fetching the policy of a particular wallet address.", + "vali_policyWalletAddress_6": "The respective
validation is not displayed when an incorrect API key is added while fetching the policy of a particular wallet address.", + "vali_policyWalletAddress_7": "The respective validation is not displayed when API key is not added while fetching the policy of a particular wallet address.", + "fail_policyWalletAddress_1": "An error message is displayed in the policy of a particular wallet address's API.", + "fail_policyWalletAddress_2": "An error message is displayed when an invalid wallet address is added while fetching the policy of a particular wallet address", + "fail_policyWalletAddress_3": "An error message is displayed when an incorrect wallet address is added while fetching the policy of a particular wallet address.", + "fail_policyWalletAddress_4": "An error message is displayed when the wallet address is not added while fetching the policy of a particular wallet address.", + "fail_policyWalletAddress_5": "An error message is displayed when an invalid API key is added while fetching the policy of a particular wallet address", + "fail_policyWalletAddress_6": "An error message is displayed when an incorrect API key is added while fetching the policy of a particular wallet address.", + "fail_policyWalletAddress_7": "An error message is displayed when API key is not added while fetching the policy of a particular wallet address.", + "vali_policyWalletAddressandEPVersion_walletAddress": "The wallet address is not displayed as expected in the policy of a particular wallet address and entry point version's response.", + "vali_policyWalletAddressandEPVersion_name": "The name is not displayed as expected in the policy of a particular wallet address and entry point version's response.", + "vali_policyWalletAddressandEPVersion_description": "The description is not displayed as expected in the policy of a particular wallet address and entry point version's response.", + "vali_policyWalletAddressandEPVersion_id": "The description is not displayed as expected in the policy of a particular wallet address and entry point version's response.", + "vali_policyWalletAddressandEPVersion_2": "The respective validation is not displayed when an invalid wallet address is added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_3": "The respective validation is not displayed when an incorrect wallet address is added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_4": "The respective validation is not displayed when the wallet address is not added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_5": "The respective validation is not displayed when an invalid entry point version is added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_6": "The respective validation is not displayed when an incorrect entry point version is added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_7": "The respective validation is not displayed when entry point version is not added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_8": "The respective validation is not displayed when the wallet address and entry point version is not added while fetching the policy of a particular wallet address 
and entry point version.", + "vali_policyWalletAddressandEPVersion_9": "The respective validation is not displayed when an invalid API key is added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_10": "The respective validation is not displayed when an incorrect API key is added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressandEPVersion_11": "The respective validation is not displayed when API key is not added while fetching the policy of a particular wallet address and entry point version.", + "fail_policyWalletAddressandEPVersion_1": "An error message is displayed in the policy of a particular wallet address and entry point version's API.", + "fail_policyWalletAddressandEPVersion_2": "An error message is displayed when an invalid wallet address is added while fetching the policy of a particular wallet address and entry point version", + "fail_policyWalletAddressandEPVersion_3": "An error message is displayed when an incorrect wallet address is added while fetching the policy of a particular wallet address and entry point version.", + "fail_policyWalletAddressandEPVersion_4": "An error message is displayed when the wallet address is not added while fetching the policy of a particular wallet address and entry point version.", + "fail_policyWalletAddressandEPVersion_5": "An error message is displayed when an invalid entry point version is added while fetching the policy of a particular wallet address and entry point version", + "fail_policyWalletAddressandEPVersion_6": "An error message is displayed when an incorrect entry point version is added while fetching the policy of a particular wallet address and entry point version.", + "fail_policyWalletAddressandEPVersion_7": "An error message is displayed when entry point version is not added while fetching the policy of a particular wallet address and entry point version.", + "fail_policyWalletAddressandEPVersion_8": "An error message is displayed when the wallet address and entry point version is not added while fetching the policy of a particular wallet address and entry point version.", + "fail_policyWalletAddressandEPVersion_9": "An error message is displayed when an invalid API key is added while fetching the policy of a particular wallet address and entry point version", + "fail_policyWalletAddressandEPVersion_10": "An error message is displayed when an incorrect API key is added while fetching the policy of a particular wallet address and entry point version.", + "fail_policyWalletAddressandEPVersion_11": "An error message is displayed when API key is not added while fetching the policy of a particular wallet address and entry point version.", + "vali_policyWalletAddressEPVersionandChainid_walletAddress": "The wallet address is not displayed as expected in the policy of a particular wallet address, entry point version and chain id response.", + "vali_policyWalletAddressEPVersionandChainid_name": "The name is not displayed as expected in the policy of a particular wallet address, entry point version and chain id response.", + "vali_policyWalletAddressEPVersionandChainid_description": "The description is not displayed as expected in the policy of a particular wallet address, entry point version and chain id response.", + "vali_policyWalletAddressEPVersionandChainid_id": "The description is not displayed as expected in the policy of a particular wallet address, entry point version and chain id 
response.", + "vali_policyWalletAddressEPVersionandChainid_2": "The respective validation is not displayed when an invalid wallet address is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_3": "The respective validation is not displayed when an incorrect wallet address is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_4": "The respective validation is not displayed when the wallet address is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_5": "The respective validation is not displayed when an invalid entry point version is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_6": "The respective validation is not displayed when an incorrect entry point version is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_7": "The respective validation is not displayed when entry point version is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_8": "The respective validation is not displayed when an invalid chainId is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_9": "The respective validation is not displayed when an incorrect chainId is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_10": "The respective validation is not displayed when chainId is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_11": "The respective validation is not displayed when the wallet address, entry point version and chain id is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_12": "The respective validation is not displayed when an invalid API key is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_13": "The respective validation is not displayed when an incorrect API key is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_policyWalletAddressEPVersionandChainid_14": "The respective validation is not displayed when API key is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_1": "An error message is displayed in the policy of a particular wallet address, entry point version and chain id API.", + "fail_policyWalletAddressEPVersionandChainid_2": "An error message is displayed when an invalid wallet address is added while fetching the policy of a particular wallet address, entry point version and chain id's", + "fail_policyWalletAddressEPVersionandChainid_3": "An error message is displayed when an incorrect wallet address is added while fetching the policy of a particular wallet address, entry 
point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_4": "An error message is displayed when the wallet address is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_5": "An error message is displayed when an invalid entry point version is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_6": "An error message is displayed when an incorrect entry point version is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_7": "An error message is displayed when entry point version is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_8": "An error message is displayed when an invalid chainId is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_9": "An error message is displayed when an incorrect chainId is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_10": "An error message is displayed when chainId is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_11": "An error message is displayed when the wallet address, entry point version and chain id is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_12": "An error message is displayed when an invalid API key is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_13": "An error message is displayed when an incorrect API key is added while fetching the policy of a particular wallet address, entry point version and chain id.", + "fail_policyWalletAddressEPVersionandChainid_14": "An error message is displayed when API key is not added while fetching the policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddress_walletAddress": "The wallet address is not displayed as expected in the latest policy of a particular wallet address's response.", + "vali_latestPolicyWalletAddress_name": "The name is not displayed as expected in the latest policy of a particular wallet address's response.", + "vali_latestPolicyWalletAddress_description": "The description is not displayed as expected in the latest policy of a particular wallet address's response.", + "vali_latestPolicyWalletAddress_id": "The id is not displayed as expected in the latest policy of a particular wallet address's response.", + "vali_latestPolicyWalletAddress_2": "The respective validation is not displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address.", + "vali_latestPolicyWalletAddress_3": "The respective validation is not displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address.", + "vali_latestPolicyWalletAddress_4": "The respective validation is not displayed when the wallet address is not added while fetching the latest policy
of a particular wallet address.", + "vali_latestPolicyWalletAddress_5": "The respective validation is not displayed when an invalid API key is added while fetching the latest policy of a particular wallet address.", + "vali_latestPolicyWalletAddress_6": "The respective validation is not displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address.", + "vali_latestPolicyWalletAddress_7": "The respective validation is not displayed when API key is not added while fetching the latest policy of a particular wallet address.", + "fail_latestPolicyWalletAddress_1": "An error message is displayed in the latest policy of a particular wallet address's API.", + "fail_latestPolicyWalletAddress_2": "An error message is displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address", + "fail_latestPolicyWalletAddress_3": "An error message is displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address.", + "fail_latestPolicyWalletAddress_4": "An error message is displayed when the wallet address is not added while fetching the latest policy of a particular wallet address.", + "fail_latestPolicyWalletAddress_5": "An error message is displayed when an invalid API key is added while fetching the latest policy of a particular wallet address", + "fail_latestPolicyWalletAddress_6": "An error message is displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address.", + "fail_latestPolicyWalletAddress_7": "An error message is displayed when API key is not added while fetching the latest policy of a particular wallet address.", + "vali_latestPolicyWalletAddressandEPVersion_walletAddress": "The wallet address is not displayed as expected in the latest policy of a particular wallet address and entry point version's response.", + "vali_latestPolicyWalletAddressandEPVersion_name": "The name is not displayed as expected in the latest policy of a particular wallet address and entry point version's response.", + "vali_latestPolicyWalletAddressandEPVersion_description": "The description is not displayed as expected in the latest policy of a particular wallet address and entry point version's response.", + "vali_latestPolicyWalletAddressandEPVersion_id": "The description is not displayed as expected in the latest policy of a particular wallet address and entry point version's response.", + "vali_latestPolicyWalletAddressandEPVersion_2": "The respective validation is not displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_3": "The respective validation is not displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_4": "The respective validation is not displayed when the wallet address is not added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_5": "The respective validation is not displayed when an invalid entry point version is added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_6": "The respective validation is not displayed when an incorrect entry point version is added while 
fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_7": "The respective validation is not displayed when entry point version is not added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_8": "The respective validation is not displayed when the wallet address and entry point version is not added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_9": "The respective validation is not displayed when an invalid API key is added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_10": "The respective validation is not displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandEPVersion_11": "The respective validation is not displayed when API key is not added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_1": "An error message is displayed in the latest policy of a particular wallet address and entry point version's API.", + "fail_latestPolicyWalletAddressandEPVersion_2": "An error message is displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address and entry point version", + "fail_latestPolicyWalletAddressandEPVersion_3": "An error message is displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_4": "An error message is displayed when the wallet address is not added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_5": "An error message is displayed when an invalid entry point version is added while fetching the latest policy of a particular wallet address and entry point version", + "fail_latestPolicyWalletAddressandEPVersion_6": "An error message is displayed when an incorrect entry point version is added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_7": "An error message is displayed when entry point version is not added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_8": "An error message is displayed when the wallet address and entry point version is not added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_9": "An error message is displayed when an invalid API key is added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_10": "An error message is displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address and entry point version.", + "fail_latestPolicyWalletAddressandEPVersion_11": "An error message is displayed when API key is not added while fetching the latest policy of a particular wallet address and entry point version.", + "vali_latestPolicyWalletAddressandChainid_walletAddress": 
"The wallet address is not displayed as expected in the latest policy of a particular wallet address and chain id response.", + "vali_latestPolicyWalletAddressandChainid_name": "The name is not displayed as expected in the latest policy of a particular wallet address and chain id response.", + "vali_latestPolicyWalletAddressandChainid_description": "The description is not displayed as expected in the latest policy of a particular wallet address and chain id response.", + "vali_latestPolicyWalletAddressandChainid_id": "The description is not displayed as expected in the latest policy of a particular wallet address and chain id response.", + "vali_latestPolicyWalletAddressandChainid_2": "The respective validation is not displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_3": "The respective validation is not displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_4": "The respective validation is not displayed when the wallet address is not added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_5": "The respective validation is not displayed when an invalid chainId is added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_6": "The respective validation is not displayed when an incorrect chainId is added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_7": "The respective validation is not displayed when chainId is not added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_8": "The respective validation is not displayed when the wallet address and chainId is not added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_9": "The respective validation is not displayed when an invalid API key is added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_10": "The respective validation is not displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressandChainid_11": "The respective validation is not displayed when API key is not added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_1": "An error message is displayed in the latest policy of a particular wallet address and chain id API.", + "fail_latestPolicyWalletAddressandChainid_2": "An error message is displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address and chain id", + "fail_latestPolicyWalletAddressandChainid_3": "An error message is displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_4": "An error message is displayed when the wallet address is not added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_5": "An error message is displayed when 
an invalid chainId is added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_6": "An error message is displayed when an incorrect chainId is added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_7": "An error message is displayed when chainId is not added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_8": "An error message is displayed when the wallet address and chainId is not added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_9": "An error message is displayed when an invalid API key is added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_10": "An error message is displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address and chain id.", + "fail_latestPolicyWalletAddressandChainid_11": "An error message is displayed when API key is not added while fetching the latest policy of a particular wallet address and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_walletAddress": "The wallet address is not displayed as expected in the latest policy of a particular wallet address, entry point version and chain id response.", + "vali_latestPolicyWalletAddressEPVersionandChainid_name": "The name is not displayed as expected in the latest policy of a particular wallet address, entry point version and chain id response.", + "vali_latestPolicyWalletAddressEPVersionandChainid_description": "The description is not displayed as expected in the latest policy of a particular wallet address, entry point version and chain id response.", + "vali_latestPolicyWalletAddressEPVersionandChainid_id": "The id is not displayed as expected in the latest policy of a particular wallet address, entry point version and chain id response.", + "vali_latestPolicyWalletAddressEPVersionandChainid_2": "The respective validation is not displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_3": "The respective validation is not displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_4": "The respective validation is not displayed when the wallet address is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_5": "The respective validation is not displayed when an invalid entry point version is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_6": "The respective validation is not displayed when an incorrect entry point version is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_7": "The respective validation is not displayed when entry point version is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", +
"vali_latestPolicyWalletAddressEPVersionandChainid_8": "The respective validation is not displayed when an invalid chainId is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_9": "The respective validation is not displayed when an incorrect chainId is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_10": "The respective validation is not displayed when chainId is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_11": "The respective validation is not displayed when the wallet address, entry point version and chainId is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "vali_latestPolicyWalletAddressEPVersionandChainid_12": "The respective validation is not displayed when an invalid API key is added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "vali_latestPolicyWalletAddressEPVersionandChainid_13": "The respective validation is not displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "vali_latestPolicyWalletAddressEPVersionandChainid_14": "The respective validation is not displayed when API key is not added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "fail_latestPolicyWalletAddressEPVersionandChainid_1": "An error message is displayed in the policy of a particular wallet address, entry point version and chain id API.", + "fail_latestPolicyWalletAddressEPVersionandChainid_2": "An error message is displayed when an invalid wallet address is added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "fail_latestPolicyWalletAddressEPVersionandChainid_3": "An error message is displayed when an incorrect wallet address is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "fail_latestPolicyWalletAddressEPVersionandChainid_4": "An error message is displayed when the wallet address is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "fail_latestPolicyWalletAddressEPVersionandChainid_5": "An error message is displayed when an invalid entry point version is added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "fail_latestPolicyWalletAddressEPVersionandChainid_6": "An error message is displayed when an incorrect entry point version is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "fail_latestPolicyWalletAddressEPVersionandChainid_7": "An error message is displayed when entry point version is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "fail_latestPolicyWalletAddressEPVersionandChainid_8": "An error message is displayed when an invalid chainId is added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "fail_latestPolicyWalletAddressEPVersionandChainid_9": "An error message is displayed when an incorrect 
chainId is added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "fail_latestPolicyWalletAddressEPVersionandChainid_10": "An error message is displayed when chainId is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "fail_latestPolicyWalletAddressEPVersionandChainid_11": "An error message is displayed when the wallet address, entry point version and chainId is not added while fetching the latest policy of a particular wallet address, entry point version and chain id.", + "fail_latestPolicyWalletAddressEPVersionandChainid_12": "An error message is displayed when an invalid API key is added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "fail_latestPolicyWalletAddressEPVersionandChainid_13": "An error message is displayed when an incorrect API key is added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "fail_latestPolicyWalletAddressEPVersionandChainid_14": "An error message is displayed when API key is not added while fetching the latest policy of a particular wallet address, entry point version and chain id", + "fail_whitelistv1_1": "An error message is displayed in the whitelist v1 API.", + "fail_whitelistv2_1": "An error message is displayed in the whitelist v2 API.", + "fail_whitelistv1_2": "The validation message is not displayed when whitelisting address with invalid url and v1 API.", + "fail_whitelistv2_2": "The validation message is not displayed when whitelisting address with invalid url and v2 API.", + "vali_whitelistv1_2": "The validation message is displayed when removing non whitelisted address with invalid url and v1 API.", + "vali_whitelistv2_2": "The validation message is displayed when removing non whitelisted address with invalid url and v2 API.", + "fail_whitelistv1_3": "The validation message is not displayed when whitelisting address with incorrect url and v1 API.", + "fail_whitelistv2_3": "The validation message is not displayed when whitelisting address with incorrect url and v2 API.", + "vali_whitelistv1_3": "The validation message is displayed when removing non whitelisted address with incorrect url and v1 API.", + "vali_whitelistv2_3": "The validation message is displayed when removing non whitelisted address with incorrect url and v2 API.", + "fail_whitelistv1_4": "The validation message is not displayed when whitelisting address with invalid address and v1 API.", + "fail_whitelistv2_4": "The validation message is not displayed when whitelisting address with invalid address and v2 API.", + "vali_whitelistv1_4": "The validation message is displayed when removing non whitelisted address with invalid address and v1 API.", + "vali_whitelistv2_4": "The validation message is displayed when removing non whitelisted address with invalid address and v2 API.", + "fail_whitelistv1_44": "The validation message is not displayed when whitelisting address with incorrect address and v1 API.", + "fail_whitelistv2_44": "The validation message is not displayed when whitelisting address with incorrect address and v2 API.", + "vali_whitelistv1_44": "The validation message is displayed when removing non whitelisted address with incorrect address and v1 API.", + "vali_whitelistv2_44": "The validation message is displayed when removing non whitelisted address with incorrect address and v2 API.", + "fail_whitelistv1_5": "The validation message is not displayed 
when whitelisting address without address and v1 API.", + "fail_whitelistv2_5": "The validation message is not displayed when whitelisting address without address and v2 API.", + "vali_whitelistv1_5": "The validation message is displayed when removing non whitelisted address without address and v1 API.", + "vali_whitelistv2_5": "The validation message is displayed when removing non whitelisted address without address and v2 API.", + "fail_whitelistv1_6": "The validation message is not displayed when whitelisting address with invalid apikey and v1 API.", + "fail_whitelistv2_6": "The validation message is not displayed when whitelisting address with invalid apikey and v2 API.", + "vali_whitelistv1_6": "The validation message is displayed when removing non whitelisted address with invalid apikey and v1 API.", + "vali_whitelistv2_6": "The validation message is displayed when removing non whitelisted address with invalid apikey and v2 API.", + "fail_whitelistv1_7": "The validation message is not displayed when whitelisting address without apikey and v1 API.", + "fail_whitelistv2_7": "The validation message is not displayed when whitelisting address without apikey and v2 API.", + "vali_whitelistv1_7": "The validation message is displayed when removing non whitelisted address without apikey and v1 API.", + "vali_whitelistv2_7": "The validation message is displayed when removing non whitelisted address without apikey and v2 API.", + "fail_whitelistv1_8": "The validation message is not displayed when whitelisting address without chainid and v1 API.", + "fail_whitelistv2_8": "The validation message is not displayed when whitelisting address without chainid and v2 API.", + "vali_whitelistv1_8": "The validation message is displayed when removing non whitelisted address without chainid and v1 API.", + "vali_whitelistv2_8": "The validation message is displayed when removing non whitelisted address without chainid and v2 API.", + "fail_checkWhitelistv1_1": "An error message is displayed in the check whitelist v1 API.", + "fail_checkWhitelistv2_1": "An error message is displayed in the check whitelist v2 API.", + "fail_checkWhitelistv1_2": "The validation message is not displayed when checking whitelisted address with invalid url and v1 API.", + "fail_checkWhitelistv2_2": "The validation message is not displayed when checking whitelisted address with invalid url and v2 API.", + "vali_checkWhitelistv1_2": "The validation message is displayed when checking non whitelisted address with invalid url and v1 API.", + "vali_checkWhitelistv2_2": "The validation message is displayed when checking non whitelisted address with invalid url and v2 API.", + "fail_checkWhitelistv1_3": "The validation message is not displayed when checking whitelisted address with incorrect url and v1 API.", + "fail_checkWhitelistv2_3": "The validation message is not displayed when checking whitelisted address with incorrect url and v2 API.", + "vali_checkWhitelistv1_3": "The validation message is displayed when checking non whitelisted address with incorrect url and v1 API.", + "vali_checkWhitelistv2_3": "The validation message is displayed when checking non whitelisted address with incorrect url and v2 API.", + "fail_checkWhitelistv1_4": "The validation message is not displayed when checking whitelisted address with invalid address and v1 API.", + "fail_checkWhitelistv2_4": "The validation message is not displayed when checking whitelisted address with invalid address and v2 API.", + "vali_checkWhitelistv1_4": "The validation message is
displayed when checking non whitelisted address with invalid address and v1 API.", + "vali_checkWhitelistv2_4": "The validation message is displayed when checking non whitelisted address with invalid address and v2 API.", + "fail_checkWhitelistv1_44": "The validation message is not displayed when checking whitelisted address with incorrect address and v1 API.", + "fail_checkWhitelistv2_44": "The validation message is not displayed when checking whitelisted address with incorrect address and v2 API.", + "vali_checkWhitelistv1_44": "The validation message is displayed when checking non whitelisted address with incorrect address and v1 API.", + "vali_checkWhitelistv2_44": "The validation message is displayed when checking non whitelisted address with incorrect address and v2 API.", + "fail_checkWhitelistv1_5": "The validation message is not displayed when checking whitelisted address without address and v1 API.", + "fail_checkWhitelistv2_5": "The validation message is not displayed when checking whitelisted address without address and v2 API.", + "vali_checkWhitelistv1_5": "The validation message is displayed when checking non whitelisted address without address and v1 API.", + "vali_checkWhitelistv2_5": "The validation message is displayed when checking non whitelisted address without address and v2 API.", + "fail_checkWhitelistv1_6": "The validation message is not displayed when checking whitelisted address with invalid apikey and v1 API.", + "fail_checkWhitelistv2_6": "The validation message is not displayed when checking whitelisted address with invalid apikey and v2 API.", + "vali_checkWhitelistv1_6": "The validation message is displayed when checking non whitelisted address with invalid apikey and v1 API.", + "vali_checkWhitelistv2_6": "The validation message is displayed when checking non whitelisted address with invalid apikey and v2 API.", + "fail_checkWhitelistv1_7": "The validation message is not displayed when checking whitelisted address without apikey and v1 API.", + "fail_checkWhitelistv2_7": "The validation message is not displayed when checking whitelisted address without apikey and v2 API.", + "vali_checkWhitelistv1_7": "The validation message is displayed when checking non whitelisted address without apikey and v1 API.", + "vali_checkWhitelistv2_7": "The validation message is displayed when checking non whitelisted address without apikey and v2 API.", + "fail_checkWhitelistv1_8": "The validation message is not displayed when checking whitelisted address without chainid and v1 API.", + "fail_checkWhitelistv2_8": "The validation message is not displayed when checking whitelisted address without chainid and v2 API.", + "vali_checkWhitelistv1_8": "The validation message is displayed when checking non whitelisted address without chainid and v1 API.", + "vali_checkWhitelistv2_8": "The validation message is displayed when checking non whitelisted address without chainid and v2 API.", + "fail_depositv1_1": "An error message is displayed in the deposit v1 API.", + "fail_depositv2_1": "An error message is displayed in the deposit v2 API.", + "fail_depositv1_2": "The validation message is not displayed when performing deposit with invalid url and v1 API.", + "fail_depositv2_2": "The validation message is not displayed when performing deposit with invalid url and v2 API.", + "vali_depositv1_2": "The validation message is displayed when performing deposit with invalid url and v1 API.", + "vali_depositv2_2": "The validation message is displayed when performing deposit with invalid url 
and v2 API.", + "fail_depositv1_3": "The validation message is not displayed when performing deposit with incorrect url and v1 API.", + "fail_depositv2_3": "The validation message is not displayed when performing deposit with incorrect url and v2 API.", + "vali_depositv1_3": "The validation message is displayed when performing deposit with incorrect url and v1 API.", + "vali_depositv2_3": "The validation message is displayed when performing deposit with incorrect url and v2 API.", + "fail_depositv1_4": "The validation message is not displayed when performing deposit with invalid value and v1 API.", + "fail_depositv2_4": "The validation message is not displayed when performing deposit with invalid value and v2 API.", + "vali_depositv1_4": "The validation message is displayed when performing deposit with invalid value and v1 API.", + "vali_depositv2_4": "The validation message is displayed when performing deposit with invalid value and v2 API.", + "fail_depositv1_44": "The validation message is not displayed when performing deposit with exceeded value and v1 API.", + "fail_depositv2_44": "The validation message is not displayed when performing deposit with exceeded value and v2 API.", + "vali_depositv1_44": "The validation message is displayed when performing deposit with exceeded value and v1 API.", + "vali_depositv2_44": "The validation message is displayed when performing deposit with exceeded value and v2 API.", + "fail_depositv1_5": "The validation message is not displayed when performing deposit without value and v1 API.", + "fail_depositv2_5": "The validation message is not displayed when performing deposit without value and v2 API.", + "vali_depositv1_5": "The validation message is displayed when performing deposit without value and v1 API.", + "vali_depositv2_5": "The validation message is displayed when performing deposit without value and v2 API.", + "fail_depositv1_6": "The validation message is not displayed when performing deposit with invalid apikey and v1 API.", + "fail_depositv2_6": "The validation message is not displayed when performing deposit with invalid apikey and v2 API.", + "vali_depositv1_6": "The validation message is displayed when performing deposit with invalid apikey and v1 API.", + "vali_depositv2_6": "The validation message is displayed when performing deposit with invalid apikey and v2 API.", + "fail_depositv1_7": "The validation message is not displayed when performing deposit without apikey and v1 API.", + "fail_depositv2_7": "The validation message is not displayed when performing deposit without apikey and v2 API.", + "vali_depositv1_7": "The validation message is displayed when performing deposit without apikey and v1 API.", + "vali_depositv2_7": "The validation message is displayed when performing deposit without apikey and v2 API.", + "fail_depositv1_8": "The validation message is not displayed when removing whitelisted address without chainid and v1 API.", + "fail_depositv2_8": "The validation message is not displayed when removing whitelisted address without chainid and v2 API.", + "vali_depositv1_8": "The validation message is displayed when performing deposit without chainid and v1 API.", + "vali_depositv2_8": "The validation message is displayed when performing deposit without chainid and v2 API.", + "fail_removeWhitelistv1_1": "An error message is displayed in the remove whitelist v1 API.", + "fail_removeWhitelistv2_1": "An error message is displayed in the remove whitelist v2 API.", + "fail_removeWhitelistv1_0": "The validation message is not 
displayed when removing non whitelisted address with v1 API.", + "fail_removeWhitelistv2_0": "The validation message is not displayed when removing non whitelisted address with v2 API.", + "vali_removeWhitelistv1_1": "The validation message is displayed when removing non whitelisted address with v1 API.", + "vali_removeWhitelistv2_1": "The validation message is displayed when removing non whitelisted address with v2 API.", + "fail_removeWhitelistv1_2": "The validation message is not displayed when removing whitelisted address with invalid url and v1 API.", + "fail_removeWhitelistv2_2": "The validation message is not displayed when removing whitelisted address with invalid url and v2 API.", + "vali_removeWhitelistv1_2": "The validation message is displayed when removing non whitelisted address with invalid url and v1 API.", + "vali_removeWhitelistv2_2": "The validation message is displayed when removing non whitelisted address with invalid url and v2 API.", + "fail_removeWhitelistv1_3": "The validation message is not displayed when removing whitelisted address with incorrect url and v1 API.", + "fail_removeWhitelistv2_3": "The validation message is not displayed when removing whitelisted address with incorrect url and v2 API.", + "vali_removeWhitelistv1_3": "The validation message is displayed when removing non whitelisted address with incorrect url and v1 API.", + "vali_removeWhitelistv2_3": "The validation message is displayed when removing non whitelisted address with incorrect url and v2 API.", + "fail_removeWhitelistv1_4": "The validation message is not displayed when removing whitelisted address with invalid address and v1 API.", + "fail_removeWhitelistv2_4": "The validation message is not displayed when removing whitelisted address with invalid address and v2 API.", + "vali_removeWhitelistv1_4": "The validation message is displayed when removing non whitelisted address with invalid address and v1 API.", + "vali_removeWhitelistv2_4": "The validation message is displayed when removing non whitelisted address with invalid address and v2 API.", + "fail_removeWhitelistv1_44": "The validation message is not displayed when removing whitelisted address with incorrect address and v1 API.", + "fail_removeWhitelistv2_44": "The validation message is not displayed when removing whitelisted address with incorrect address and v2 API.", + "vali_removeWhitelistv1_44": "The validation message is displayed when removing non whitelisted address with incorrect address and v1 API.", + "vali_removeWhitelistv2_44": "The validation message is displayed when removing non whitelisted address with incorrect address and v2 API.", + "fail_removeWhitelistv1_5": "The validation message is not displayed when removing whitelisted address without address and v1 API.", + "fail_removeWhitelistv2_5": "The validation message is not displayed when removing whitelisted address without address and v2 API.", + "vali_removeWhitelistv1_5": "The validation message is displayed when removing non whitelisted address without address and v1 API.", + "vali_removeWhitelistv2_5": "The validation message is displayed when removing non whitelisted address without address and v2 API.", + "fail_removeWhitelistv1_6": "The validation message is not displayed when removing whitelisted address with invalid apikey and v1 API.", + "fail_removeWhitelistv2_6": "The validation message is not displayed when removing whitelisted address with invalid apikey and v2 API.", + "vali_removeWhitelistv1_6": "The validation message is displayed when removing 
non whitelisted address with invalid apikey and v1 API.", + "vali_removeWhitelistv2_6": "The validation message is displayed when removing non whitelisted address with invalid apikey and v2 API.", + "fail_removeWhitelistv1_7": "The validation message is not displayed when removing whitelisted address without apikey and v1 API.", + "fail_removeWhitelistv2_7": "The validation message is not displayed when removing whitelisted address without apikey and v2 API.", + "vali_removeWhitelistv1_7": "The validation message is displayed when removing non whitelisted address without apikey and v1 API.", + "vali_removeWhitelistv2_7": "The validation message is displayed when removing non whitelisted address without apikey and v2 API.", + "fail_removeWhitelistv1_8": "The validation message is not displayed when removing whitelisted address without chainid and v1 API.", + "fail_removeWhitelistv2_8": "The validation message is not displayed when removing whitelisted address without chainid and v2 API.", + "vali_removeWhitelistv1_8": "The validation message is displayed when removing non whitelisted address without chainid and v1 API.", + "vali_removeWhitelistv2_8": "The validation message is displayed when removing non whitelisted address without chainid and v2 API.", + "fail_getAllWhitelistv2_1": "An error message is displayed in the get all whitelist v2 API.", + "vali_getAllWhitelistv2_1": "The validation message is displayed while getting all whitelisted address with v2 API.", + "fail_getAllWhitelistv2_2": "The validation message is not displayed while getting all whitelisted address with invalid url and v2 API.", + "vali_getAllWhitelistv2_2": "The validation message is displayed while getting all whitelisted address with invalid url and v2 API.", + "fail_getAllWhitelistv2_3": "The validation message is not displayed while getting all whitelisted address with incorrect url and v2 API.", + "vali_getAllWhitelistv2_3": "The validation message is displayed while getting all whitelisted address with incorrect url and v2 API.", + "fail_getAllWhitelistv2_6": "The validation message is not displayed while getting all whitelisted address with invalid apikey and v2 API.", + "vali_getAllWhitelistv2_6": "The validation message is displayed while getting all whitelisted address with invalid apikey and v2 API.", + "fail_getAllWhitelistv2_7": "The validation message is not displayed while getting all whitelisted address without apikey and v2 API.", + "vali_getAllWhitelistv2_7": "The validation message is displayed while getting all whitelisted address without apikey and v2 API.", + "fail_getAllWhitelistv2_8": "The validation message is not displayed while getting all whitelisted address without chainid and v2 API.", + "vali_getAllWhitelistv2_8": "The validation message is displayed while getting all whitelisted address without chainid and v2 API.", + "fail_getAllWhitelistv2_9": "The validation message is not displayed while getting all whitelisted address with v2 API.", + "vali_getAllWhitelistv2_9": "The validation message is displayed while getting all whitelisted address with v2 API.", + "vali_getAllWhitelist_addresses": "The addresses parameter is not displayed in the getAllWhitelist response.", + "fail_pimlicoAddress_1": "An error message is displayed in the pimlico address endpoint.", + "fail_pimlicoAddress_2": "The validation message is not displayed while performing pimlico address with invalid url API.", + "vali_pimlicoAddress_2": "The validation message is displayed while performing pimlico address with invalid url API.", +
"fail_pimlicoAddress_3": "The validation message is not displayed while performing pimlico address with incorrect url API.", + "vali_pimlicoAddress_3": "The validation message is displayed while performing pimlico address with incorrect url API.", + "fail_pimlicoAddress_4": "The validation message is not displayed while performing pimlico address with invalid address API.", + "vali_pimlicoAddress_4": "The validation message is displayed while performing pimlico address with invalid address API.", + "fail_pimlicoAddress_44": "The validation message is not displayed while performing pimlico address with incorrect address API.", + "vali_pimlicoAddress_44": "The validation message is displayed while performing pimlico address with incorrect address API.", + "fail_pimlicoAddress_5": "The validation message is not displayed while performing pimlico address without address API.", + "vali_pimlicoAddress_5": "The validation message is displayed while performing pimlico address without address API.", + "fail_pimlicoAddress_6": "The validation message is not displayed while performing pimlico address with invalid apikey API.", + "vali_pimlicoAddress_6": "The validation message is displayed while performing pimlico address with invalid apikey API.", + "fail_pimlicoAddress_7": "The validation message is not displayed while performing pimlico address without apikey API.", + "vali_pimlicoAddress_7": "The validation message is displayed while performing pimlico address without apikey API.", + "fail_pimlicoAddress_8": "The validation message is not displayed while performing pimlico address without chainid API.", + "vali_pimlicoAddress_8": "The validation message is displayed while performing pimlico address without chainid API.", + "fail_pimlicoAddress_9": "The validation message is not displayed while performing pimlico address without chainid API.", + "vali_pimlicoAddress_9": "The validation message is displayed while performing pimlico address without chainid API.", + "fail_pimlicoAddress_10": "The validation message is not displayed while performing pimlico address without chainid API.", + "vali_pimlicoAddress_10": "The validation message is displayed while performing pimlico address without chainid API.", + "fail_metadata_2": "The validation message is not displayed while performing metadata with invalid url API.", + "vali_metadata_2": "The validation message is displayed while performing metadata with invalid url API.", + "fail_metadata_3": "The validation message is not displayed while performing metadata with incorrect url API.", + "vali_metadata_3": "The validation message is displayed while performing metadata with incorrect url API.", + "fail_metadata_6": "The validation message is not displayed while performing metadata with invalid apikey API.", + "vali_metadata_6": "The validation message is displayed while performing metadata with invalid apikey API.", + "fail_metadata_7": "The validation message is not displayed while performing metadata without apikey API.", + "vali_metadata_7": "The validation message is displayed while performing metadata without apikey API.", + "fail_metadata_8": "The validation message is not displayed while performing metadata without chainid API.", + "vali_metadata_8": "The validation message is displayed while performing metadata without chainid API.", + + "fail_skandha_getGasPrice_1": "An error message is displayed in the skandha_getGasPrice endpoint.", + "fail_skandha_getGasPrice_2": "The validation message is not displayed in the skandha_getGasPrice endpoint 
when entered invalid method name.", + "vali_skandha_getGasPrice_2": "The validation message is displayed in the skandha_getGasPrice endpoint when entered invalid method name.", + "fail_skandha_getGasPrice_3": "The validation message is not displayed in the skandha_getGasPrice endpoint when entered incorrect method name.", + "vali_skandha_getGasPrice_3": "The validation message is displayed in the skandha_getGasPrice endpoint when entered incorrect method name.", + "fail_skandha_getGasPrice_4": "The validation message is not displayed in the skandha_getGasPrice endpoint when not entered method name.", + "vali_skandha_getGasPrice_4": "The validation message is displayed in the skandha_getGasPrice endpoint when not entered method name.", + "vali_skandha_getGasPrice_id": "The id value is empty in the skandha_getGasPrice response.", + "vali_skandha_getGasPrice_maxPriorityFeePerGas": "The maxPriorityFeePerGas value is empty in the skandha_getGasPrice response.", + "vali_skandha_getGasPrice_maxFeePerGas": "The maxFeePerGas value is empty in the skandha_getGasPrice response.", + + "fail_skandha_feeHistory_1": "An error message is displayed in the skandha_feeHistory endpoint.", + "fail_skandha_feeHistory_2": "The validation message is not displayed in the skandha_feeHistory endpoint when entered invalid method name.", + "vali_skandha_feeHistory_2": "The validation message is displayed in the skandha_feeHistory endpoint when entered invalid method name.", + "fail_skandha_feeHistory_3": "The validation message is not displayed in the skandha_feeHistory endpoint when entered incorrect method name.", + "vali_skandha_feeHistory_3": "The validation message is displayed in the skandha_feeHistory endpoint when entered incorrect method name.", + "fail_skandha_feeHistory_4": "The validation message is not displayed in the skandha_feeHistory endpoint when not entered method name.", + "vali_skandha_feeHistory_4": "The validation message is displayed in the skandha_feeHistory endpoint when not entered method name.", + "fail_skandha_feeHistory_5": "The validation message is not displayed in the skandha_feeHistory endpoint when entered invalid entry point address.", + "vali_skandha_feeHistory_5": "The validation message is displayed in the skandha_feeHistory endpoint when entered invalid entry point address.", + "fail_skandha_feeHistory_6": "The validation message is not displayed in the skandha_feeHistory endpoint when entered incorrect entry point address.", + "vali_skandha_feeHistory_6": "The validation message is displayed in the skandha_feeHistory endpoint when entered incorrect entry point address.", + "fail_skandha_feeHistory_7": "The validation message is not displayed in the skandha_feeHistory endpoint when not entered entry point address.", + "vali_skandha_feeHistory_7": "The validation message is displayed in the skandha_feeHistory endpoint when not entered entry point address.", + "vali_skandha_feeHistory_id": "The id value is empty in the skandha_feeHistory response.", + "vali_skandha_feeHistory_actualGasPrice": "The actualGasPrice value is empty in the skandha_feeHistory response.", + "vali_skandha_feeHistory_maxPriorityFeePerGas": "The maxPriorityFeePerGas value is empty in the skandha_feeHistory response.", + "vali_skandha_feeHistory_maxFeePerGas": "The maxFeePerGas value is empty in the skandha_feeHistory response.", + + "fail_skandha_config_1": "An error message is displayed in the skandha_config endpoint.", + "fail_skandha_config_2": "The validation message is not displayed in the 
skandha_config endpoint when entered invalid method name.", + "vali_skandha_config_2": "The validation message is displayed in the skandha_config endpoint when entered invalid method name.", + "fail_skandha_config_3": "The validation message is not displayed in the skandha_config endpoint when entered incorrect method name.", + "vali_skandha_config_3": "The validation message is displayed in the skandha_config endpoint when entered incorrect method name.", + "fail_skandha_config_4": "The validation message is not displayed in the skandha_config endpoint when not entered method name.", + "vali_skandha_config_4": "The validation message is displayed in the skandha_config endpoint when not entered method name.", + "vali_skandha_config_id": "The id value is empty in the skandha_config response.", + "vali_skandha_config_flags": "The flags value is empty in the skandha_config response.", + "vali_skandha_config_entryPoints": "The entryPoints value is empty in the skandha_config response.", + "vali_skandha_config_beneficiary": "The beneficiary value is empty in the skandha_config response.", + "vali_skandha_config_relayers": "The relayers value is empty in the skandha_config response.", + + "fail_skandha_getUserOperationByHash_1": "An error message is displayed in the eth_getUserOperationByHash endpoint.", + "fail_skandha_getUserOperationByHash_2": "The validation message is not displayed in the eth_getUserOperationByHash endpoint when entered invalid hash address.", + "vali_skandha_getUserOperationByHash_2": "The validation message is displayed in the eth_getUserOperationByHash endpoint when entered invalid hash address.", + "fail_skandha_getUserOperationByHash_3": "The validation message is not displayed in the eth_getUserOperationByHash endpoint when entered incorrect hash address.", + "vali_skandha_getUserOperationByHash_3": "The validation message is displayed in the eth_getUserOperationByHash endpoint when entered incorrect hash address.", + "fail_skandha_getUserOperationByHash_4": "The validation message is not displayed in the eth_getUserOperationByHash endpoint when not entered hash address.", + "vali_skandha_getUserOperationByHash_4": "The validation message is displayed in the eth_getUserOperationByHash endpoint when not entered hash address.", + "fail_skandha_getUserOperationByHash_5": "The validation message is not displayed in the eth_getUserOperationByHash endpoint when entered invalid method name.", + "vali_skandha_getUserOperationByHash_5": "The validation message is displayed in the eth_getUserOperationByHash endpoint when entered invalid method name.", + "fail_skandha_getUserOperationByHash_6": "The validation message is not displayed in the eth_getUserOperationByHash endpoint when entered incorrect method name.", + "vali_skandha_getUserOperationByHash_6": "The validation message is displayed in the eth_getUserOperationByHash endpoint when entered incorrect method name.", + "fail_skandha_getUserOperationByHash_7": "The validation message is not displayed in the eth_getUserOperationByHash endpoint when not entered method name.", + "vali_skandha_getUserOperationByHash_7": "The validation message is displayed in the eth_getUserOperationByHash endpoint when not entered method name.", + "vali_skandha_getUserOperationByHash_id": "The id value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_sender": "The sender value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_nonce": "The nonce value is 
empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_initCode": "The initCode value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_callData": "The callData value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_callGasLimit": "The callGasLimit value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_verificationGasLimit": "The verificationGasLimit value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_preVerificationGas": "The preVerificationGas value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_maxFeePerGas": "The maxFeePerGas value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_maxPriorityFeePerGas": "The maxPriorityFeePerGas value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_paymasterAndData": "The paymasterAndData value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_signature": "The signature value is empty in the eth_getUserOperationByHash response.", + "vali_skandha_getUserOperationByHash_entryPoint": "The entryPoint value is empty in the eth_getUserOperationByHash response.", + + "fail_skandha_getUserOperationReceipt_1": "An error message is displayed in the eth_getUserOperationReceipt endpoint.", + "fail_skandha_getUserOperationReceipt_2": "The validation message is not displayed in the eth_getUserOperationReceipt endpoint when entered invalid hash address.", + "vali_skandha_getUserOperationReceipt_2": "The validation message is displayed in the eth_getUserOperationReceipt endpoint when entered invalid hash address.", + "fail_skandha_getUserOperationReceipt_3": "The validation message is not displayed in the eth_getUserOperationReceipt endpoint when entered incorrect hash address.", + "vali_skandha_getUserOperationReceipt_3": "The validation message is displayed in the eth_getUserOperationReceipt endpoint when entered incorrect hash address.", + "fail_skandha_getUserOperationReceipt_4": "The validation message is not displayed in the eth_getUserOperationReceipt endpoint when not entered hash address.", + "vali_skandha_getUserOperationReceipt_4": "The validation message is displayed in the eth_getUserOperationReceipt endpoint when not entered hash address.", + "fail_skandha_getUserOperationReceipt_5": "The validation message is not displayed in the eth_getUserOperationReceipt endpoint when entered invalid method name.", + "vali_skandha_getUserOperationReceipt_5": "The validation message is displayed in the eth_getUserOperationReceipt endpoint when entered invalid method name.", + "fail_skandha_getUserOperationReceipt_6": "The validation message is not displayed in the eth_getUserOperationReceipt endpoint when entered incorrect method name.", + "vali_skandha_getUserOperationReceipt_6": "The validation message is displayed in the eth_getUserOperationReceipt endpoint when entered incorrect method name.", + "fail_skandha_getUserOperationReceipt_7": "The validation message is not displayed in the eth_getUserOperationReceipt endpoint when not entered method name.", + "vali_skandha_getUserOperationReceipt_7": "The validation message is displayed in the eth_getUserOperationReceipt endpoint when not entered method name.", + "vali_skandha_getUserOperationReceipt_id": "The id value is empty 
in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_userOpHash": "The userOpHash value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_sender": "The sender value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_nonce": "The nonce value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_actualGasCost": "The actualGasCost value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_actualGasUsed": "The actualGasUsed value is empty in the eth_getUserOperationReceipt response.", + + "vali_skandha_getUserOperationReceipt_to": "The to value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_from": "The from value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_transactionIndex": "The transactionIndex value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_gasUsed": "The gasUsed value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_logsBloom": "The logsBloom value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_blockHash": "The blockHash value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_transactionHash": "The transactionHash value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_logs": "The logs value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_blockNumber": "The blockNumber value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_confirmations": "The confirmations value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_cumulativeGasUsed": "The cumulativeGasUsed value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_effectiveGasPrice": "The effectiveGasPrice value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_status": "The status value is empty in the eth_getUserOperationReceipt response.", + "vali_skandha_getUserOperationReceipt_type": "The type value is empty in the eth_getUserOperationReceipt response.", + + "fail_skandha_estimateUserOperationGas_1": "An error message is displayed in the eth_estimateUserOperationGas endpoint.", + "fail_skandha_estimateUserOperationGas_2": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid sender address.", + "vali_skandha_estimateUserOperationGas_2": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid sender address.", + "fail_skandha_estimateUserOperationGas_3": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect sender address.", + "vali_skandha_estimateUserOperationGas_3": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect sender address.", + "fail_skandha_estimateUserOperationGas_4": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered sender address.", + "vali_skandha_estimateUserOperationGas_4": "The validation 
message is displayed in the eth_estimateUserOperationGas endpoint when not entered sender address.", + "fail_skandha_estimateUserOperationGas_5": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid nonce.", + "vali_skandha_estimateUserOperationGas_5": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid nonce.", + "fail_skandha_estimateUserOperationGas_6": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect nonce.", + "vali_skandha_estimateUserOperationGas_6": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect nonce.", + "fail_skandha_estimateUserOperationGas_7": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered nonce.", + "vali_skandha_estimateUserOperationGas_7": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered nonce.", + "fail_skandha_estimateUserOperationGas_8": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid initCode.", + "vali_skandha_estimateUserOperationGas_8": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid initCode.", + "fail_skandha_estimateUserOperationGas_9": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect initCode.", + "vali_skandha_estimateUserOperationGas_9": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect initCode.", + "fail_skandha_estimateUserOperationGas_10": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered initCode.", + "vali_skandha_estimateUserOperationGas_10": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered initCode.", + "fail_skandha_estimateUserOperationGas_11": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid callData.", + "vali_skandha_estimateUserOperationGas_11": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid callData.", + "fail_skandha_estimateUserOperationGas_12": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect callData.", + "vali_skandha_estimateUserOperationGas_12": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect callData.", + "fail_skandha_estimateUserOperationGas_13": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered callData.", + "vali_skandha_estimateUserOperationGas_13": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered callData.", + "fail_skandha_estimateUserOperationGas_14": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid callGasLimit.", + "vali_skandha_estimateUserOperationGas_14": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid callGasLimit.", + "fail_skandha_estimateUserOperationGas_15": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect callGasLimit.", + "vali_skandha_estimateUserOperationGas_15": "The 
validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect callGasLimit.", + "fail_skandha_estimateUserOperationGas_16": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered callGasLimit.", + "vali_skandha_estimateUserOperationGas_16": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered callGasLimit.", + "fail_skandha_estimateUserOperationGas_17": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid verificationGasLimit.", + "vali_skandha_estimateUserOperationGas_17": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid verificationGasLimit.", + "fail_skandha_estimateUserOperationGas_18": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect verificationGasLimit.", + "vali_skandha_estimateUserOperationGas_18": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect verificationGasLimit.", + "fail_skandha_estimateUserOperationGas_19": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered verificationGasLimit.", + "vali_skandha_estimateUserOperationGas_19": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered verificationGasLimit.", + "fail_skandha_estimateUserOperationGas_20": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid preVerificationGas.", + "vali_skandha_estimateUserOperationGas_20": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid preVerificationGas.", + "fail_skandha_estimateUserOperationGas_21": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect preVerificationGas.", + "vali_skandha_estimateUserOperationGas_21": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect preVerificationGas.", + "fail_skandha_estimateUserOperationGas_22": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered preVerificationGas.", + "vali_skandha_estimateUserOperationGas_22": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered preVerificationGas.", + "fail_skandha_estimateUserOperationGas_23": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid maxPriorityFeePerGas.", + "vali_skandha_estimateUserOperationGas_23": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid maxPriorityFeePerGas.", + "fail_skandha_estimateUserOperationGas_24": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect maxPriorityFeePerGas.", + "vali_skandha_estimateUserOperationGas_24": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect maxPriorityFeePerGas.", + "fail_skandha_estimateUserOperationGas_25": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered maxPriorityFeePerGas.", + "vali_skandha_estimateUserOperationGas_25": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered 
maxPriorityFeePerGas.", + "fail_skandha_estimateUserOperationGas_26": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid maxFeePerGas.", + "vali_skandha_estimateUserOperationGas_26": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid maxFeePerGas.", + "fail_skandha_estimateUserOperationGas_27": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect maxFeePerGas.", + "vali_skandha_estimateUserOperationGas_27": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect maxFeePerGas.", + "fail_skandha_estimateUserOperationGas_28": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered maxFeePerGas.", + "vali_skandha_estimateUserOperationGas_28": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered maxFeePerGas.", + "fail_skandha_estimateUserOperationGas_29": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid paymasterAndData.", + "vali_skandha_estimateUserOperationGas_29": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid paymasterAndData.", + "fail_skandha_estimateUserOperationGas_30": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect paymasterAndData.", + "vali_skandha_estimateUserOperationGas_30": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect paymasterAndData.", + "fail_skandha_estimateUserOperationGas_31": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered paymasterAndData.", + "vali_skandha_estimateUserOperationGas_31": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered paymasterAndData.", + "fail_skandha_estimateUserOperationGas_32": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid signature.", + "vali_skandha_estimateUserOperationGas_32": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid signature.", + "fail_skandha_estimateUserOperationGas_33": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect signature.", + "vali_skandha_estimateUserOperationGas_33": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect signature.", + "fail_skandha_estimateUserOperationGas_34": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered signature.", + "vali_skandha_estimateUserOperationGas_34": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered signature.", + "fail_skandha_estimateUserOperationGas_35": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid entry point address.", + "vali_skandha_estimateUserOperationGas_35": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid entry point address.", + "fail_skandha_estimateUserOperationGas_36": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect entry point address.", + 
"vali_skandha_estimateUserOperationGas_36": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect entry point address.", + "fail_skandha_estimateUserOperationGas_37": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered entry point address.", + "vali_skandha_estimateUserOperationGas_37": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered entry point address.", + "fail_skandha_estimateUserOperationGas_38": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered invalid method name.", + "vali_skandha_estimateUserOperationGas_38": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered invalid method name.", + "fail_skandha_estimateUserOperationGas_39": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when entered incorrect method name.", + "vali_skandha_estimateUserOperationGas_39": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when entered incorrect method name.", + "fail_skandha_estimateUserOperationGas_40": "The validation message is not displayed in the eth_estimateUserOperationGas endpoint when not entered method name.", + "vali_skandha_estimateUserOperationGas_40": "The validation message is displayed in the eth_estimateUserOperationGas endpoint when not entered method name.", + "vali_skandha_estimateUserOperationGas_id": "The id value is empty in the eth_estimateUserOperationGas response.", + "vali_skandha_estimateUserOperationGas_preVerificationGas": "The preVerificationGas value is empty in the eth_estimateUserOperationGas response.", + "vali_skandha_estimateUserOperationGas_verificationGasLimit": "The verificationGasLimit value is empty in the eth_estimateUserOperationGas response.", + "vali_skandha_estimateUserOperationGas_callGasLimit": "The callGasLimit value is empty in the eth_estimateUserOperationGas response.", + "vali_skandha_estimateUserOperationGas_verificationGas": "The verificationGas value is empty in the eth_estimateUserOperationGas response.", + "vali_skandha_estimateUserOperationGas_maxFeePerGas": "The maxFeePerGas value is empty in the eth_estimateUserOperationGas response.", + "vali_skandha_estimateUserOperationGas_maxPriorityFeePerGas": "The maxPriorityFeePerGas value is empty in the eth_estimateUserOperationGas response.", + "vali_skandha_estimateUserOperationGas_validUntil": "The validUntil value is empty in the eth_estimateUserOperationGas response.", + + "fail_skandha_sendUserOperation_1": "An error message is displayed in the eth_sendUserOperation endpoint.", + "fail_skandha_sendUserOperation_2": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid sender address.", + "vali_skandha_sendUserOperation_2": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid sender address.", + "fail_skandha_sendUserOperation_3": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect sender address.", + "vali_skandha_sendUserOperation_3": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect sender address.", + "fail_skandha_sendUserOperation_4": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered sender address.", + "vali_skandha_sendUserOperation_4": "The 
validation message is displayed in the eth_sendUserOperation endpoint when not entered sender address.", + "fail_skandha_sendUserOperation_5": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid nonce.", + "vali_skandha_sendUserOperation_5": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid nonce.", + "fail_skandha_sendUserOperation_6": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect nonce.", + "vali_skandha_sendUserOperation_6": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect nonce.", + "fail_skandha_sendUserOperation_7": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered nonce.", + "vali_skandha_sendUserOperation_7": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered nonce.", + "fail_skandha_sendUserOperation_8": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid initCode.", + "vali_skandha_sendUserOperation_8": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid initCode.", + "fail_skandha_sendUserOperation_9": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect initCode.", + "vali_skandha_sendUserOperation_9": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect initCode.", + "fail_skandha_sendUserOperation_10": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered initCode.", + "vali_skandha_sendUserOperation_10": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered initCode.", + "fail_skandha_sendUserOperation_11": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid callData.", + "vali_skandha_sendUserOperation_11": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid callData.", + "fail_skandha_sendUserOperation_12": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect callData.", + "vali_skandha_sendUserOperation_12": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect callData.", + "fail_skandha_sendUserOperation_13": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered callData.", + "vali_skandha_sendUserOperation_13": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered callData.", + "fail_skandha_sendUserOperation_14": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid callGasLimit.", + "vali_skandha_sendUserOperation_14": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid callGasLimit.", + "fail_skandha_sendUserOperation_15": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect callGasLimit.", + "vali_skandha_sendUserOperation_15": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect callGasLimit.", + "fail_skandha_sendUserOperation_16": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered callGasLimit.", + "vali_skandha_sendUserOperation_16": 
"The validation message is displayed in the eth_sendUserOperation endpoint when not entered callGasLimit.", + "fail_skandha_sendUserOperation_17": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid verificationGasLimit.", + "vali_skandha_sendUserOperation_17": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid verificationGasLimit.", + "fail_skandha_sendUserOperation_18": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect verificationGasLimit.", + "vali_skandha_sendUserOperation_18": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect verificationGasLimit.", + "fail_skandha_sendUserOperation_19": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered verificationGasLimit.", + "vali_skandha_sendUserOperation_19": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered verificationGasLimit.", + "fail_skandha_sendUserOperation_20": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid preVerificationGas.", + "vali_skandha_sendUserOperation_20": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid preVerificationGas.", + "fail_skandha_sendUserOperation_21": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect preVerificationGas.", + "vali_skandha_sendUserOperation_21": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect preVerificationGas.", + "fail_skandha_sendUserOperation_22": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered preVerificationGas.", + "vali_skandha_sendUserOperation_22": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered preVerificationGas.", + "fail_skandha_sendUserOperation_23": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid maxPriorityFeePerGas.", + "vali_skandha_sendUserOperation_23": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid maxPriorityFeePerGas.", + "fail_skandha_sendUserOperation_24": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect maxPriorityFeePerGas.", + "vali_skandha_sendUserOperation_24": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect maxPriorityFeePerGas.", + "fail_skandha_sendUserOperation_25": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered maxPriorityFeePerGas.", + "vali_skandha_sendUserOperation_25": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered maxPriorityFeePerGas.", + "fail_skandha_sendUserOperation_26": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid maxFeePerGas.", + "vali_skandha_sendUserOperation_26": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid maxFeePerGas.", + "fail_skandha_sendUserOperation_27": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect maxFeePerGas.", + "vali_skandha_sendUserOperation_27": "The validation message is displayed in the eth_sendUserOperation 
endpoint when entered incorrect maxFeePerGas.", + "fail_skandha_sendUserOperation_28": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered maxFeePerGas.", + "vali_skandha_sendUserOperation_28": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered maxFeePerGas.", + "fail_skandha_sendUserOperation_29": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid paymasterAndData.", + "vali_skandha_sendUserOperation_29": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid paymasterAndData.", + "fail_skandha_sendUserOperation_30": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect paymasterAndData.", + "vali_skandha_sendUserOperation_30": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect paymasterAndData.", + "fail_skandha_sendUserOperation_31": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered paymasterAndData.", + "vali_skandha_sendUserOperation_31": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered paymasterAndData.", + "fail_skandha_sendUserOperation_32": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid signature.", + "vali_skandha_sendUserOperation_32": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid signature.", + "fail_skandha_sendUserOperation_33": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect signature.", + "vali_skandha_sendUserOperation_33": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect signature.", + "fail_skandha_sendUserOperation_34": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered signature.", + "vali_skandha_sendUserOperation_34": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered signature.", + "fail_skandha_sendUserOperation_35": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid entry point address.", + "vali_skandha_sendUserOperation_35": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid entry point address.", + "fail_skandha_sendUserOperation_36": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered incorrect entry point address.", + "vali_skandha_sendUserOperation_36": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect entry point address.", + "fail_skandha_sendUserOperation_37": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered entry point address.", + "vali_skandha_sendUserOperation_37": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered entry point address.", + "fail_skandha_sendUserOperation_38": "The validation message is not displayed in the eth_sendUserOperation endpoint when entered invalid method name.", + "vali_skandha_sendUserOperation_38": "The validation message is displayed in the eth_sendUserOperation endpoint when entered invalid method name.", + "fail_skandha_sendUserOperation_39": "The validation message is not displayed in the eth_sendUserOperation 
endpoint when entered incorrect method name.", + "vali_skandha_sendUserOperation_39": "The validation message is displayed in the eth_sendUserOperation endpoint when entered incorrect method name.", + "fail_skandha_sendUserOperation_40": "The validation message is not displayed in the eth_sendUserOperation endpoint when not entered method name.", + "vali_skandha_sendUserOperation_40": "The validation message is displayed in the eth_sendUserOperation endpoint when not entered method name.", + "vali_skandha_sendUserOperation_id": "The id value is empty in the eth_sendUserOperation response.", + "vali_skandha_sendUserOperation_result": "The result value is empty in the eth_sendUserOperation response." } diff --git a/test/data/testData.json b/test/data/testData.json index b8cdaac..4f1e713 100644 --- a/test/data/testData.json +++ b/test/data/testData.json @@ -14,6 +14,7 @@ "nft_tokenAddress": "0xe55C5793a52AF819fBf3e87a23B36708E6FDd2Cc", "entryPointAddress": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d2789", "invalidEntryPointAddress": "0x5FF137D4b0FDCD49DcA30c7CF57E578a026d278", + "incorrectentryPointAddress": "0x5a3132D4b0FDCD49acA30c7CF57a578a026d2719", "zerodev_address": "0x50Dd0c06cB35D015446a82D13F7451E43ff8769d", "simpleaccount_address": "0x9cD261Fa42287c7E24aBe62e6AAe165949628246", "paymasterAddress": "0xEc43912D8C772A0Eba5a27ea5804Ba14ab502009", @@ -138,12 +139,66 @@ "recipient3": "0xeD1098F954CC08936e4888Eb3EBAAcA6f15bD061", "recipient4": "0x81a13A210717b765C5733aD70D81deC1DF1834dd", "recipient5": "0xE05FB316eB8C4ba7288D43c1bd87BE8a8d16761C", + "txCount": 50, + "transactionHash": "0x639d74d9a681f19bc3a32ae4b5ee4b2562cec8458508babe0ac727d96b153360", + "invalid_transactionHash": "0x639d74d9a681f19bc3a32ae4b5ee4b2562cec8458508babe0ac727d96b15336", + "incorrect_transactionHash": "0x639d24d9a682f19bc3a32a14b5ee4b2562cec8458508ba1e0ac727d96b153160", + "arka_fqdn": "https://arka-qa.etherspot.io", + "arka_addPolicy": "add-policy", + "arka_updatePolicy": "update-policy", + "arka_deletePolicy": "delete-policy", + "arka_enablePolicy": "enable-policy", + "arka_disablePolicy": "disable-policy", + "arka_policy": "policy", + "arka_policyWalletAddress": "policy/wallet-address", + "globalMaximumUsd": "6000.0000", + "globalMaximumNative": "2000.000000000000000000", + "globalMaximumOpCount": 2000, + "perUserMaximumApplicable": true, + "perUserMaximumUsd": "200.0000", + "perUserMaximumNative": "300.000000000000000000", + "perUserMaximumOpCount": 60, + "perOpMaximumApplicable": true, + "perOpMaximumUsd": "20.0000", + "perOpMaximumNative": "30.000000000000000000", + "sponsorAddress": "0x684E10D67dc4159B813Db70a1DAeB5FcC98bd034", + "invalid_sponsorAddress": "0x684E10D67dc4159B813Db70a1DAeB5FcC98bd03", + "incorrect_sponsorAddress": "0x684D10D67dc4159C813Db70a1DAeB5FaC98bd035", "arka_deposit": "https://arka.etherspot.io/deposit", + "arka_deposit_invalid": "http://arka.etherspot.io/deposit", + "arka_deposit_incorrect": "https://arka.etherspot.io/depos", + "arka_deposit_v2": "https://arka.etherspot.io/deposit/v2", + "arka_deposit_v2_invalid": "http://arka.etherspot.io/deposit/v2", + "arka_deposit_v2_incorrect": "https://arka.etherspot.io/depos/v2", "arka_whitelist": "https://arka.etherspot.io/whitelist", + "arka_whitelist_invalid": "http://arka.etherspot.io/whitelist", + "arka_whitelist_incorrect": "https://arka.etherspot.io/whist", + "arka_whitelist_v2": "https://arka.etherspot.io/whitelist/v2", + "arka_whitelist_v2_invalid": "http://arka.etherspot.io/whitelist/v2", + "arka_whitelist_v2_incorrect": 
"https://arka.etherspot.io/white/v2", "arka_checkwhitelist": "https://arka.etherspot.io/checkWhitelist", + "arka_checkwhitelist_invalid": "http://arka.etherspot.io/checkWhitelist", + "arka_checkwhitelist_incorrect": "https://arka.etherspot.io/checkWhist", + "arka_checkwhitelist_v2": "https://arka.etherspot.io/checkWhitelist/v2", + "arka_checkwhitelist_v2_invalid": "http://arka.etherspot.io/checkWhitelist/v2", + "arka_checkwhitelist_v2_incorrect": "https://arka.etherspot.io/checkWhite/v2", + "arka_removeWhitelist": "https://arka.etherspot.io/removeWhitelist", + "arka_removeWhitelist_invalid": "http://arka.etherspot.io/removeWhitelist", + "arka_removeWhitelist_incorrect": "https://arka.etherspot.io/removeWhist", + "arka_removeWhitelist_v2": "https://arka.etherspot.io/removeWhitelist/v2", + "arka_removeWhitelist_v2_invalid": "http://arka.etherspot.io/removeWhitelist/v2", + "arka_removeWhitelist_v2_incorrect": "https://arka.etherspot.io/removeWhite/v2", "arka_pimlico": "https://arka.etherspot.io/pimlicoAddress", - "txCount": 50, - "transactionHash": "0x639d74d9a681f19bc3a32ae4b5ee4b2562cec8458508babe0ac727d96b153360", - "invalid_transactionHash": "0x639d74d9a681f19bc3a32ae4b5ee4b2562cec8458508babe0ac727d96b15336", - "incorrect_transactionHash": "0x639d24d9a682f19bc3a32a14b5ee4b2562cec8458508ba1e0ac727d96b153160" + "arka_pimlico_invalid": "http://arka.etherspot.io/pimlicoAddress", + "arka_pimlico_incorrect": "https://arka.etherspot.io/pimlicoess", + "arka_getAllWhitelist_v2": "https://arka.etherspot.io/getAllWhitelist/v2", + "arka_getAllWhitelist_v2_invalid": "http://arka.etherspot.io/getAllWhitelist/v2", + "arka_getAllWhitelist_v2_incorrect": "https://arka.etherspot.io/getAllWhite/v2", + "arka_metadata": "https://arka.etherspot.io/metadata", + "arka_metadata_invalid": "http://arka.etherspot.io/metadata", + "arka_metadata_incorrect": "https://arka.etherspot.io/mata", + "address": "0xE4fAe3bEEeFEDAaC49548869fca6F180fd37CA40", + "blockCount": 15, + "invalid_hex": "1234567890", + "incorrect_hex": "0xC22cF2aA30A0181d6fE4B0B11aab238714Ba54f3" } diff --git a/test/specs/loadAndPerformance/arka/goerli.spec.js b/test/specs/loadAndPerformance/arka/goerli.spec.js deleted file mode 100644 index e2e4026..0000000 --- a/test/specs/loadAndPerformance/arka/goerli.spec.js +++ /dev/null @@ -1,353 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk } from '@etherspot/prime-sdk'; -import { ethers } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import Helper from '../../../utils/helper.js'; -import data from '../../../data/apiTestData.json' assert { type: 'json' }; - -let goerliTestNetSdk; - -/* eslint-disable prettier/prettier */ -describe('Performance testing of Arka Endpoints with Goerli Network', function () { - it('SMOKE: Validate the Whitelist endpoint of Arka on Goerli Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_whitelist, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [addresses, data.goerli_chainid_testnet, process.env.API_KEY], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - 
console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Deposit endpoint of Arka on Goerli Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_deposit, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.value, - data.goerli_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Check Whitelist address endpoint with already whitelisted address of Arka on Goerli Network', async function () { - var test = this; - const sponsorAddress = data.address; - const addresses = data.address; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_checkwhitelist, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - sponsorAddress, - addresses, - data.goerli_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - 
addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Check Whitelist address endpoint with non whitelisted address of Arka on Goerli Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const sponsorAddress = data.address; - const addresses = randomAddress.address; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_checkwhitelist, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - sponsorAddress, - addresses, - data.goerli_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Pimlico Paymaster endpoint of Arka on Goerli Network', async function () { - var test = this; - // const context = { token: data.usdc_token }; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_pimlico, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: 'USDC' }, - data.goerli_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Perform the Sponsor a Transaction with Arka and PrimeSDK on the Goerli network', async function () { - var test = this; - const startTime = performance.now(); - - // initializating sdk - try { - goerliTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.goerli_chainid_testnet), - projectKey: process.env.PROJECT_KEY_TESTNET, - } - ); - - try { - assert.strictEqual( - 
goerliTestNetSdk.state.EOAAddress, - data.eoaAddress, - 'The EOA Address is not calculated correctly.' - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The SDK is not initialled successfully.'); - } - - // get EtherspotWallet address - try { - await goerliTestNetSdk.getCounterFactualAddress(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The Etherspot Wallet Address is not displayed successfully.' - ); - } - - // clear the transaction batch - try { - await goerliTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The transaction of the batch is not clear correctly.'); - } - - // add transactions to the batch - try { - await goerliTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The addition of transaction in the batch is not performed.'); - } - - // get balance of the account address - try { - await goerliTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The balance of the native token is not displayed.'); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await goerliTestNetSdk.estimate({ - url: 'https://arka.etherspot.io/', - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - - assert.fail( - 'The estimate transactions added to the batch and get the fee data for the UserOp is not performed.' - ); - } - - // sign the UserOp and sending to the bundler - let uoHash; - try { - uoHash = await goerliTestNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The sign the UserOp and sending to the bundler action is not performed.' 
- ); - } - - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - }); -}); diff --git a/test/specs/loadAndPerformance/arka/loadTesting.spec.js b/test/specs/loadAndPerformance/arka/loadTesting.spec.js deleted file mode 100644 index 2298f45..0000000 --- a/test/specs/loadAndPerformance/arka/loadTesting.spec.js +++ /dev/null @@ -1,12 +0,0 @@ -import saveToDatabase from '../../../utils/arkaCsvToDatabase.js'; - -describe('CSV to Database Test', function () { - it('Saving the Arka CSV data to the database', function (done) { - saveToDatabase(process.env.ARKA_CSV_PATH); - - // Added a delay to ensure the asynchronous operations are completed - setTimeout(() => { - done(); - }, 5000); // Adjust the timeout as needed - }); -}); diff --git a/test/specs/loadAndPerformance/arka/mumbai.spec.js b/test/specs/loadAndPerformance/arka/mumbai.spec.js deleted file mode 100644 index 764a8bd..0000000 --- a/test/specs/loadAndPerformance/arka/mumbai.spec.js +++ /dev/null @@ -1,353 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk } from '@etherspot/prime-sdk'; -import { ethers } from 'ethers'; -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import Helper from '../../../utils/helper.js'; -import data from '../../../data/apiTestData.json' assert { type: 'json' }; - -let mumbaiTestNetSdk; - -/* eslint-disable prettier/prettier */ -describe('Performance testing of Arka Endpoints with Mumbai Network', function () { - it.only('SMOKE: Validate the Whitelist endpoint of Arka on Mumbai Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_whitelist, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - '0xE05FB316eB8C4ba7288D43c1bd87BE8a8d16761C', - '80002', - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Deposit endpoint of Arka on Mumbai Network', async function () { - var test = this; - - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_deposit, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.value, - data.mumbai_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - 
console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Check Whitelist address endpoint with already whitelisted address of Arka on Mumbai Network', async function () { - var test = this; - - const sponsorAddress = data.address; - const addresses = data.address; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_checkwhitelist, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - sponsorAddress, - addresses, - data.mumbai_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Check Whitelist address endpoint with non whitelisted address of Arka on Mumbai Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const sponsorAddress = data.address; - const addresses = randomAddress.address; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_checkwhitelist, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - sponsorAddress, - addresses, - data.mumbai_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - 
const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Pimlico Paymaster endpoint of Arka on Mumbai Network', async function () { - var test = this; - - // const context = { token: data.usdc_token }; - const startTime = performance.now(); - - try { - const response = await fetch(data.arka_pimlico, { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - params: [ - data.entryPointAddress, - { token: 'USDC' }, - data.mumbai_chainid_testnet, - process.env.API_KEY, - ], - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Perform the Sponsor a Transaction with Arka and PrimeSDK on the Mumbai network', async function () { - var test = this; - const startTime = performance.now(); - // initializating sdk - try { - mumbaiTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.mumbai_chainid_testnet), - projectKey: process.env.PROJECT_KEY_TESTNET, - } - ); - - try { - assert.strictEqual( - mumbaiTestNetSdk.state.EOAAddress, - data.eoaAddress, - 'The EOA Address is not calculated correctly.' - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - } - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The SDK is not initialled successfully.'); - } - - // get EtherspotWallet address - try { - await mumbaiTestNetSdk.getCounterFactualAddress(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The Etherspot Wallet Address is not displayed successfully.' 
- ); - } - - // clear the transaction batch - try { - await mumbaiTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The transaction of the batch is not clear correctly.'); - } - - // add transactions to the batch - try { - await mumbaiTestNetSdk.addUserOpsToBatch({ - to: data.recipient, - value: ethers.utils.parseEther(data.value), - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The addition of transaction in the batch is not performed.'); - } - - // get balance of the account address - try { - await mumbaiTestNetSdk.getNativeBalance(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The balance of the native token is not displayed.'); - } - - // estimate transactions added to the batch and get the fee data for the UserOp - let op; - try { - op = await mumbaiTestNetSdk.estimate({ - url: 'https://arka.etherspot.io/', - api_key: process.env.API_KEY, - context: { mode: 'sponsor' }, - }); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The estimate transactions added to the batch and get the fee data for the UserOp is not performed.' - ); - } - - // sign the UserOp and sending to the bundler - try { - await mumbaiTestNetSdk.send(op); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The sign the UserOp and sending to the bundler action is not performed.' - ); - } - - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - }); -}); diff --git a/test/specs/loadAndPerformance/skandha/goerli.spec.js b/test/specs/loadAndPerformance/skandha/goerli.spec.js deleted file mode 100644 index 6762fc4..0000000 --- a/test/specs/loadAndPerformance/skandha/goerli.spec.js +++ /dev/null @@ -1,361 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import data from '../../../data/apiTestData.json' assert { type: 'json' }; - -describe('Performance testing of Skandha Endpoints with Goerli Network', function () { - it('SMOKE: Validate the skandha_getGasPrice method of the skandha with valid details on Goerli Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://goerli-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'skandha_getGasPrice', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const 
returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the skandha_getGasPrice response.' - ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the skandha_config method of the skandha with valid details on Goerli Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://goerli-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'skandha_config', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the skandha_config response.' - ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the skandha_feeHistory method of the skandha with valid details on Goerli Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://goerli-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'skandha_feeHistory', - params: [data.entryPointAddress, data.blockCount, 'latest'], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the skandha_feeHistory response.' 
- ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the eth_chainId method of the skandha with valid details on Goerli Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://goerli-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'eth_chainId', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the eth_chainId response.' - ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the eth_supportedEntryPoints method of the skandha with valid details on Goerli Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://goerli-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'eth_supportedEntryPoints', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the eth_supportedEntryPoints response.' 
- ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Batch RPC calls of the skandha with valid details on Goerli Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://goerli-bundler.etherspot.io/', { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify([ - { method: 'skandha_config' }, - { method: 'eth_chainId' }, - { method: 'eth_supportedEntryPoints' }, - { - method: 'skandha_feeHistory', - params: [data.entryPointAddress, data.blockCount, 'latest'], - }, - ]), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue[0].result, - 'The first result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The first result value is not displayed correctly in the Batch RPC calls response.' - ); - } - - try { - assert.isNotEmpty( - returnedValue[1].result, - 'The second result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The second result value is not displayed correctly in the Batch RPC calls response.' - ); - } - - try { - assert.isNotEmpty( - returnedValue[2].result, - 'The third result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The third result value is not displayed correctly in the Batch RPC calls response.' - ); - } - - try { - assert.isNotEmpty( - returnedValue[3].result, - 'The fourth result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The fourth result value is not displayed correctly in the Batch RPC calls response.' 
- ); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); -}); diff --git a/test/specs/loadAndPerformance/skandha/loadTesting.spec.js b/test/specs/loadAndPerformance/skandha/loadTesting.spec.js deleted file mode 100644 index 59d547f..0000000 --- a/test/specs/loadAndPerformance/skandha/loadTesting.spec.js +++ /dev/null @@ -1,12 +0,0 @@ -import saveToDatabase from '../../../utils/skandhaCsvToDatabase.js'; - -describe('CSV to Database Test', function () { - it('Saving the Skandha CSV data to the database', function (done) { - saveToDatabase(process.env.SKANDHA_CSV_PATH); - - // Added a delay to ensure the asynchronous operations are completed - setTimeout(() => { - done(); - }, 5000); // Adjust the timeout as needed - }); -}); diff --git a/test/specs/loadAndPerformance/skandha/mumbai.spec.js b/test/specs/loadAndPerformance/skandha/mumbai.spec.js deleted file mode 100644 index 7e4862d..0000000 --- a/test/specs/loadAndPerformance/skandha/mumbai.spec.js +++ /dev/null @@ -1,361 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { assert } from 'chai'; -import addContext from 'mochawesome/addContext.js'; -import data from '../../../data/apiTestData.json' assert { type: 'json' }; - -describe('Performance testing of Skandha Endpoints with Mumbai Network', function () { - it('SMOKE: Validate the skandha_getGasPrice method of the skandha with valid details on Mumbai Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://mumbai-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'skandha_getGasPrice', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the skandha_getGasPrice response.' 
- ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the skandha_config method of the skandha with valid details on Mumbai Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://mumbai-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'skandha_config', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the skandha_config response.' - ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the skandha_feeHistory method of the skandha with valid details on Mumbai Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://mumbai-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'skandha_feeHistory', - params: [data.entryPointAddress, data.blockCount, 'latest'], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the skandha_feeHistory response.' 
- ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the eth_chainId method of the skandha with valid details on Mumbai Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://mumbai-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'eth_chainId', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the eth_chainId response.' - ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the eth_supportedEntryPoints method of the skandha with valid details on Mumbai Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://mumbai-bundler.etherspot.io/', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - method: 'eth_supportedEntryPoints', - params: [], - id: 46, - jsonrpc: '2.0', - }), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue.result, - 'The result value is empty in the eth_supportedEntryPoints response.' 
- ); - } catch (e) { - console.error(e); - assert.fail('Not getting correct response.'); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); - - it('SMOKE: Validate the Batch RPC calls of the skandha with valid details on mumbai Network', async function () { - var test = this; - const startTime = performance.now(); - - try { - const response = await fetch('https://mumbai-bundler.etherspot.io/', { - method: 'POST', - headers: { - Accept: 'application/json', - 'Content-Type': 'application/json', - }, - body: JSON.stringify([ - { method: 'skandha_config' }, - { method: 'eth_chainId' }, - { method: 'eth_supportedEntryPoints' }, - { - method: 'skandha_feeHistory', - params: [data.entryPointAddress, data.blockCount, 'latest'], - }, - ]), - }); - if (!response.ok) { - console.error('Response status:', response.status); - addContext(test, 'Response status: ' + response.status); - const errorResponse = await response.text(); - console.error('Error response:', errorResponse); - addContext(test, 'Error response:' + errorResponse); - assert.fail('Getting an error'); - } else { - addContext(test, 'Response status: ' + response.status); - const ttfb_ms = performance.now() - startTime; // Calculate TTFB in milliseconds - const ttfb_s = (ttfb_ms / 1000).toFixed(2); - - addContext(test, 'Time to First Byte (TTFB): ' + ttfb_s + ' second'); - const returnedValue = await response.json(); - const returnedValueString = JSON.stringify(returnedValue); - addContext(test, 'Value returned: ' + returnedValueString); - - try { - assert.isNotEmpty( - returnedValue[0].result, - 'The first result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The first result value is not displayed correctly in the Batch RPC calls response.' - ); - } - - try { - assert.isNotEmpty( - returnedValue[1].result, - 'The second result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The second result value is not displayed correctly in the Batch RPC calls response.' - ); - } - - try { - assert.isNotEmpty( - returnedValue[2].result, - 'The third result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The third result value is not displayed correctly in the Batch RPC calls response.' - ); - } - - try { - assert.isNotEmpty( - returnedValue[3].result, - 'The fourth result value is empty in the Batch RPC calls response.' - ); - } catch (e) { - console.error(e); - assert.fail( - 'The fourth result value is not displayed correctly in the Batch RPC calls response.' 
- ); - } - } - } catch (e) { - console.error('Fetch error:', e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('Getting an error'); - } - }); -}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js new file mode 100644 index 0000000..faa41f2 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js @@ -0,0 +1,790 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the check whitelist endpoint of the Arka', function () { + it( + 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // check the whitelist status + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_1); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // check the whitelist status + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid url of Arka on ' + + 
randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_2); + console.log(message.fail_checkWhitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_2); + console.log(message.vali_checkWhitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_3); + console.log(message.fail_checkWhitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_3); + console.log(message.vali_checkWhitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_4); + console.log(message.fail_checkWhitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_4); + console.log(message.vali_checkWhitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_44); + console.log(message.fail_checkWhitelistv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_44); + console.log(message.vali_checkWhitelistv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without address of Arka on ' + + 
randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_5); + console.log(message.fail_checkWhitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_5); + console.log(message.vali_checkWhitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_6); + console.log(message.fail_checkWhitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_6); + console.log(message.vali_checkWhitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_7); + console.log(message.fail_checkWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_7); + console.log(message.vali_checkWhitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_8); + console.log(message.fail_checkWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_8); + console.log(message.vali_checkWhitelistv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_8); + } + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + 
randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // check the whitelist status + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_1); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // check the whitelist status + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_2); + console.log(message.fail_checkWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv2_2); + console.log(message.vali_checkWhitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_3); + 
console.log(message.fail_checkWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv2_3); + console.log(message.vali_checkWhitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_4); + console.log(message.fail_checkWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv2_4); + console.log(message.vali_checkWhitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_4); + console.log(message.fail_checkWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv2_4); + console.log(message.vali_checkWhitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_5); + console.log(message.fail_checkWhitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_5); + console.log(message.vali_checkWhitelistv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_6); + console.log(message.fail_checkWhitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if 
(error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_6); + console.log(message.vali_checkWhitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_7); + console.log(message.fail_checkWhitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_7); + console.log(message.vali_checkWhitelistv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_8); + console.log(message.fail_checkWhitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_8); + console.log(message.vali_checkWhitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js b/test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js new file mode 100644 index 0000000..9009b2c --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js @@ -0,0 +1,648 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the deposit endpoint of the Arka', function () { + it( + 'SMOKE: Validate the deposit endpoint with v1 of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_deposit, + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.deposit_2); + + // perform assertions + 
assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_invalid, // invalid url + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_depositv1_2); + console.log(message.fail_depositv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv1_2); + console.log(message.vali_depositv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_incorrect, // incorrect url + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_depositv1_3); + console.log(message.fail_depositv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv1_3); + console.log(message.vali_depositv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and invalid value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [ + data.invalidValue, // invalid value + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv1_4); + console.log(message.fail_depositv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_depositv1_4); + console.log(message.vali_depositv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and exceeded value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [ + data.exceededValue, // exceeded value + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv1_44); + console.log(message.fail_depositv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_depositv1_44); + console.log(message.vali_depositv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and without value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post(
+ data.arka_deposit, + { + params: [ + randomChainId, // without value + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv1_5); + console.log(message.fail_depositv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_5); + console.log(message.vali_depositv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [data.value, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_depositv1_6); + console.log(message.fail_depositv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_6); + console.log(message.vali_depositv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [data.value, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_depositv1_7); + console.log(message.fail_depositv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_7); + console.log(message.vali_depositv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [data.value, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_depositv1_8); + console.log(message.fail_depositv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_8); + console.log(message.vali_depositv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_8); + } + } + } + ); + + it( + 'SMOKE: Validate the deposit endpoint with v2 of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.deposit_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 
and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2_invalid, // invalid url + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_depositv2_2); + console.log(message.fail_depositv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv2_2); + console.log(message.vali_depositv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2_incorrect, // incorrect url + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_depositv2_3); + console.log(message.fail_depositv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv2_3); + console.log(message.vali_depositv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_3); + } + } + } + ); + + it.only( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [ + data.invalidValue, // invalid value + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv2_4); + console.log(message.fail_depositv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_depositv2_4); + console.log(message.vali_depositv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_4); + } + } + } + ); + + it.only( + 'REGRESSION: Validate the deposit endpoint with v2 and exceeded value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [ + data.exceededValue, // exceeded value + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv2_4); + console.log(message.fail_depositv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_depositv2_4); + console.log(message.vali_depositv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_4); + } + } + } + ); + + it.only( + 'REGRESSION: Validate the deposit endpoint with v2 and without value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [ + randomChainId, // without value + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv2_5); + console.log(message.fail_depositv2_5); + } catch (e) { + const error = 
e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_5); + console.log(message.vali_depositv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_depositv2_6); + console.log(message.fail_depositv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_6); + console.log(message.vali_depositv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_depositv2_7); + console.log(message.fail_depositv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_7); + console.log(message.vali_depositv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_depositv2_8); + console.log(message.fail_depositv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_8); + console.log(message.vali_depositv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_8); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js new file mode 100644 index 0000000..275b4a2 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js @@ -0,0 +1,296 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 
'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the get all whitelist endpoint of the Arka', function () { + it( + 'SMOKE: Validate the get all whitelist endpoint with whitelisted address and v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // validate the get all whitelist endpoint + try { + const response = await axios.post( + data.arka_getAllWhitelist_v2, + { + params: ['1', randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.isNotEmpty( + response.addresses, + message.vali_getAllWhitelist_addresses + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_2); + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with new random address and v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + try { + const response = await axios.post( + data.arka_getAllWhitelist_v2, + { + params: ['1', randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_9); + console.log(message.fail_getAllWhitelistv2_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.getAllWhitelist_1)) { + addContext(test, message.vali_getAllWhitelistv2_1); + console.log(message.vali_getAllWhitelistv2_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist_invalid, // invalid url + { + params: ['1', randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_2); + console.log(message.fail_getAllWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_getAllWhitelistv2_2); + console.log(message.vali_getAllWhitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = 
[randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist_incorrect, // incorrect url + { + params: ['1', randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_3); + console.log(message.fail_getAllWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_getAllWhitelistv2_3); + console.log(message.vali_getAllWhitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist, + { + params: ['1', randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_6); + console.log(message.fail_getAllWhitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_getAllWhitelistv2_6); + console.log(message.vali_getAllWhitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist, + { + params: ['1', randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_7); + console.log(message.fail_getAllWhitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_getAllWhitelistv2_7); + console.log(message.vali_getAllWhitelistv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist, + { + params: ['1', process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_8); + console.log(message.fail_getAllWhitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_getAllWhitelistv2_8); + console.log(message.vali_getAllWhitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js b/test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js new file mode 100644 index 
0000000..0c1c94f --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js @@ -0,0 +1,264 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the metadata endpoint of the Arka', function () { + it( + 'SMOKE: Validate the metadata endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + // validate the metadata endpoint + try { + const response = await axios.get( + data.arka_metadata, + { + params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + }, + header + ); + + // validate the sponsorAddress parameter in the response + assert.isNotEmpty( + response.data.sponsorAddress, + message.vali_metadata_sponsorAddress + ); + + // validate the sponsorWalletBalance parameter in the response + assert.isNotEmpty( + response.data.sponsorWalletBalance, + message.vali_metadata_sponsorWalletBalance + ); + + // validate the sponsorBalance parameter in the response + assert.isNotEmpty( + response.data.sponsorBalance, + message.vali_metadata_sponsorBalance + ); + + // validate the chainsSupported parameter in the response + assert.isNotEmpty( + response.data.chainsSupported, + message.vali_metadata_chainsSupported + ); + + // validate the tokenPaymasters parameter in the response + assert.isNotEmpty( + response.data.tokenPaymasters, + message.vali_metadata_tokenPaymasters + ); + + // validate the multiTokenPaymasters parameter in the response + assert.isNotEmpty( + response.data.multiTokenPaymasters, + message.vali_metadata_multiTokenPaymasters + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_1); + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata_invalid, // invalid url + { + params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + }, + header + ); + + addContext(test, message.fail_metadata_2); + console.log(message.fail_metadata_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_metadata_2); + console.log(message.vali_metadata_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = 
await axios.post( + data.arka_metadata_incorrect, // incorrect url + { + params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + }, + header + ); + + addContext(test, message.fail_metadata_3); + console.log(message.fail_metadata_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_metadata_3); + console.log(message.vali_metadata_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata, + { + params: { chainId: randomChainId, apiKey: 'arka_public' }, // invalid apikey + }, + header + ); + + addContext(test, message.fail_metadata_6); + console.log(message.fail_metadata_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_metadata_6); + console.log(message.vali_metadata_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata, + { + params: { chainId: randomChainId }, // without apikey + }, + header + ); + + addContext(test, message.fail_metadata_7); + console.log(message.fail_metadata_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_metadata_7); + console.log(message.vali_metadata_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata, + { + params: { apiKey: process.env.API_KEY }, // without chainid + }, + header + ); + + addContext(test, message.fail_metadata_8); + console.log(message.fail_metadata_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_metadata_8); + console.log(message.vali_metadata_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_8); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js b/test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js new file mode 100644 index 0000000..62b0695 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js @@ -0,0 +1,479 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 
'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the pimlico address endpoint of the Arka', function () { + it( + 'SMOKE: Validate the pimlico address endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // check the whitelist status + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_1); + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico_invalid, // invalid url + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_2); + console.log(message.fail_pimlicoAddress_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_pimlicoAddress_2); + console.log(message.vali_pimlicoAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico_incorrect, // incorrect url + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_3); + console.log(message.fail_pimlicoAddress_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + 
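+            // an incorrect pimlico address url is expected to surface as a
+            // not-found error; reaching this branch is treated as a pass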
addContext(test, message.vali_pimlicoAddress_3); + console.log(message.vali_pimlicoAddress_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + data.invalid_sponsorAddress, // invalid address + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_4); + console.log(message.fail_pimlicoAddress_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_pimlicoAddress_4); + console.log(message.vali_pimlicoAddress_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_44); + console.log(message.fail_pimlicoAddress_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_pimlicoAddress_44); + console.log(message.vali_pimlicoAddress_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + { token: data.usdc_token }, + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_5); + console.log(message.fail_pimlicoAddress_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_5); + console.log(message.vali_pimlicoAddress_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + 'arka_public', + ], // invalid apikey + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_6); + console.log(message.fail_pimlicoAddress_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_6); + 
console.log(message.vali_pimlicoAddress_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [addresses, { token: data.usdc_token }, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_7); + console.log(message.fail_pimlicoAddress_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_7); + console.log(message.vali_pimlicoAddress_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.usdc_token }, + process.env.API_KEY, + ], // without chainid + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_8); + console.log(message.fail_pimlicoAddress_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_8); + console.log(message.vali_pimlicoAddress_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid token of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.invalid_usdc_token }, // invalid token + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_9); + console.log(message.fail_pimlicoAddress_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_9); + console.log(message.vali_pimlicoAddress_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without token of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + // without token + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_10); + console.log(message.fail_pimlicoAddress_10); + } catch (e) { + const error = e.response.data.error; 
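+        // the token object was omitted, so the remaining params sit one position
+        // earlier than in the valid call; the branch below treats an invalid api key
+        // rejection as the expected outcome for this malformed request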
+ + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_10); + console.log(message.vali_pimlicoAddress_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_10); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js new file mode 100644 index 0000000..2803623 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js @@ -0,0 +1,798 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the remove whitelist endpoint of the Arka', function () { + it( + 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // validate the remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.remove_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // validate remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_0); + console.log(message.fail_removeWhitelistv1_0); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelistv1_1); + 
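+          // the address was never whitelisted, so the service refusing to remove it
+          // (constant.remove_whitelist_1) is the expected, passing outcome here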
console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_2); + console.log(message.fail_removeWhitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv1_2); + console.log(message.vali_removeWhitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_3); + console.log(message.fail_removeWhitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv1_3); + console.log(message.vali_removeWhitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_4); + console.log(message.fail_removeWhitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_removeWhitelistv1_4); + console.log(message.vali_removeWhitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_44); + console.log(message.fail_removeWhitelistv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, 
message.vali_removeWhitelistv1_44); + console.log(message.vali_removeWhitelistv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_5); + console.log(message.fail_removeWhitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_5); + console.log(message.vali_removeWhitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_6); + console.log(message.fail_removeWhitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_6); + console.log(message.vali_removeWhitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_7); + console.log(message.fail_removeWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_7); + console.log(message.vali_removeWhitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_8); + console.log(message.fail_removeWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + 
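+            // omitting the chain id is expected to be rejected as an invalid api key
+            // request; reaching this branch is treated as a pass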
addContext(test, message.vali_removeWhitelistv1_8); + console.log(message.vali_removeWhitelistv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_8); + } + } + } + ); + + it( + 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // validate the remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.remove_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // validate remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_2); + console.log(message.fail_removeWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_1); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_2); + console.log(message.fail_removeWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv2_2); + console.log(message.vali_removeWhitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_2); + } + } + } 
+ ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_3); + console.log(message.fail_removeWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv2_3); + console.log(message.vali_removeWhitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_4); + console.log(message.fail_removeWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_removeWhitelistv2_4); + console.log(message.vali_removeWhitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_44); + console.log(message.fail_removeWhitelistv2_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_removeWhitelistv2_44); + console.log(message.vali_removeWhitelistv2_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_5); + console.log(message.fail_removeWhitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_5); + console.log(message.vali_removeWhitelistv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist 
endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_6); + console.log(message.fail_removeWhitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_6); + console.log(message.vali_removeWhitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_7); + console.log(message.fail_removeWhitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_7); + console.log(message.vali_removeWhitelistv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_8); + console.log(message.fail_removeWhitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_8); + console.log(message.vali_removeWhitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js new file mode 100644 index 0000000..dc3a8af --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js @@ -0,0 +1,671 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details 
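+// (only Accept and Content-Type are set; the api key travels in the request body).
+// The happy-path requests in this spec post bodies shaped as
+// { params: [addresses, chainId, apiKey] }; the negative tests below drop or
+// corrupt entries of that array to exercise the validation paths, e.g.
+//   { params: [['0x0000000000000000000000000000000000000001'], 1, '<API_KEY>'] }
+// (placeholder address and key shown for illustration only).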
+const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the whitelist endpoint of the Arka', function () { + it( + 'SMOKE: Validate the Whitelist endpoint with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv1_2); + console.log(message.fail_whitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv1_2); + console.log(message.vali_whitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv1_3); + console.log(message.fail_whitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv1_3); + console.log(message.vali_whitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv1_4); + console.log(message.fail_whitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv1_4); + console.log(message.vali_whitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect address 
of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv1_44); + console.log(message.fail_whitelistv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv1_44); + console.log(message.vali_whitelistv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv1_5); + console.log(message.fail_whitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_5); + console.log(message.vali_whitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_whitelistv1_6); + console.log(message.fail_whitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_6); + console.log(message.vali_whitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_whitelistv1_7); + console.log(message.fail_whitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_7); + console.log(message.vali_whitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const 
response = await axios.post( + data.arka_whitelist, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_whitelistv1_8); + console.log(message.fail_whitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_8); + console.log(message.vali_whitelistv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_8); + } + } + } + ); + + it( + 'SMOKE: Validate the Whitelist endpoint with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv2_2); + console.log(message.fail_whitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv2_2); + console.log(message.vali_whitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv2_3); + console.log(message.fail_whitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv2_3); + console.log(message.vali_whitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv2_4); + console.log(message.fail_whitelistv2_4); + 
} catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv2_4); + console.log(message.vali_whitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv2_4); + console.log(message.fail_whitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv2_4); + console.log(message.vali_whitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv2_5); + console.log(message.fail_whitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_5); + console.log(message.vali_whitelistv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_whitelistv2_6); + console.log(message.fail_whitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_6); + console.log(message.vali_whitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_whitelistv2_7); + console.log(message.fail_whitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_7); + console.log(message.vali_whitelistv2_7); + } 
else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_whitelistv2_8); + console.log(message.fail_whitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_8); + console.log(message.vali_whitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js b/test/specs/mainnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js new file mode 100644 index 0000000..6c6790d --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js @@ -0,0 +1,2261 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import helper from '../../../../utils/helper.js'; + +let mainnetPrimeSdk; +let sender; +let nonce; +let initCode; +let callData; +let callGasLimit; +let verificationGasLimit; +let maxFeePerGas; +let maxPriorityFeePerGas; +let paymasterAndData; +let preVerificationGas; +let signature; + +//define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the estimate user operation gas endpoint of the skandha', function () { + it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + //wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + 
}); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + sender = op.sender; + nonce = op.nonce; + initCode = op.initCode; + callData = op.callData; + callGasLimit = op.callGasLimit; + verificationGasLimit = op.verificationGasLimit; + maxFeePerGas = op.maxFeePerGas; + maxPriorityFeePerGas = op.maxPriorityFeePerGas; + paymasterAndData = op.paymasterAndData; + preVerificationGas = op.preVerificationGas; + signature = op.signature; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + } + ); + + it( + 'SMOKE: Validate the eth_estimateUserOperationGas endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_estimateUserOperationGas_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.preVerificationGas, + message.vali_skandha_estimateUserOperationGas_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.verificationGasLimit, + message.vali_skandha_estimateUserOperationGas_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.callGasLimit, + message.vali_skandha_estimateUserOperationGas_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.verificationGas, + message.vali_skandha_estimateUserOperationGas_verificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxFeePerGas, + message.vali_skandha_estimateUserOperationGas_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxPriorityFeePerGas, + message.vali_skandha_estimateUserOperationGas_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.validUntil, + message.vali_skandha_estimateUserOperationGas_validUntil + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: data.invalidSender, // invalid sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_2); + console.log(message.fail_skandha_estimateUserOperationGas_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_2); + console.log(message.vali_skandha_estimateUserOperationGas_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: data.incorrectSender, // incorrect sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_3); + console.log(message.fail_skandha_estimateUserOperationGas_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.skandha_error_2)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_3); + console.log(message.vali_skandha_estimateUserOperationGas_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + nonce: nonce, // without sender + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, +
signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_3); + console.log(message.fail_skandha_estimateUserOperationGas_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_3); + console.log(message.vali_skandha_estimateUserOperationGas_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: data.invalid_hex, // invalid nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_5); + console.log(message.fail_skandha_estimateUserOperationGas_5); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_5); + console.log(message.vali_skandha_estimateUserOperationGas_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: data.incorrect_hex, // incorrect nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_6); + console.log(message.fail_skandha_estimateUserOperationGas_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_6); + console.log(message.vali_skandha_estimateUserOperationGas_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const 
response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, // without nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_7); + console.log(message.fail_skandha_estimateUserOperationGas_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_7); + console.log(message.vali_skandha_estimateUserOperationGas_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.invalid_hex, // invalid initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_8); + console.log(message.fail_skandha_estimateUserOperationGas_8); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_8); + console.log(message.vali_skandha_estimateUserOperationGas_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.incorrect_hex, // incorrect initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_9); + console.log(message.fail_skandha_estimateUserOperationGas_9); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, 
message.vali_skandha_estimateUserOperationGas_9); + console.log(message.vali_skandha_estimateUserOperationGas_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, // without initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_10); + console.log(message.fail_skandha_estimateUserOperationGas_10); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_10); + console.log(message.vali_skandha_estimateUserOperationGas_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.invalid_hex, // invalid callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_11); + console.log(message.fail_skandha_estimateUserOperationGas_11); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.skandha_error_2)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_11); + console.log(message.vali_skandha_estimateUserOperationGas_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.incorrect_hex, // incorrect callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + 
preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_12); + console.log(message.fail_skandha_estimateUserOperationGas_12); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_12); + console.log(message.vali_skandha_estimateUserOperationGas_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, // without callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_13); + console.log(message.fail_skandha_estimateUserOperationGas_13); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_13); + console.log(message.vali_skandha_estimateUserOperationGas_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_13); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.invalid_hex, // invalid callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_14); + console.log(message.fail_skandha_estimateUserOperationGas_14); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_14); + console.log(message.vali_skandha_estimateUserOperationGas_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_14); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas 
endpoint of the skandha with incorrect callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.incorrect_hex, // incorrect callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_15); + console.log(message.fail_skandha_estimateUserOperationGas_15); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_15); + console.log(message.vali_skandha_estimateUserOperationGas_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_15); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, // without callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_16); + console.log(message.fail_skandha_estimateUserOperationGas_16); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_16); + console.log(message.vali_skandha_estimateUserOperationGas_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_16); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.invalid_hex, // invalid verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_17); +
console.log(message.fail_skandha_estimateUserOperationGas_17); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_17); + console.log(message.vali_skandha_estimateUserOperationGas_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_17); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.incorrect_hex, // incorrect verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_18); + console.log(message.fail_skandha_estimateUserOperationGas_18); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_18); + console.log(message.vali_skandha_estimateUserOperationGas_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_18); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, // without verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_19); + console.log(message.fail_skandha_estimateUserOperationGas_19); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_19); + console.log(message.vali_skandha_estimateUserOperationGas_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_19); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 
'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.invalid_hex, // invalid preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_20); + console.log(message.fail_skandha_estimateUserOperationGas_20); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_20); + console.log(message.vali_skandha_estimateUserOperationGas_20); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_20); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.incorrect_hex, // incorrect preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_21); + console.log(message.fail_skandha_estimateUserOperationGas_21); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_21); + console.log(message.vali_skandha_estimateUserOperationGas_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_21); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, // without preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_22); + console.log(message.fail_skandha_estimateUserOperationGas_22); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_22); + 
console.log(message.vali_skandha_estimateUserOperationGas_22); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_22); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.invalid_hex, // invalid maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_23); + console.log(message.fail_skandha_estimateUserOperationGas_23); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_23); + console.log(message.vali_skandha_estimateUserOperationGas_23); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_23); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.incorrect_hex, // incorrect maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_24); + console.log(message.fail_skandha_estimateUserOperationGas_24); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_24); + console.log(message.vali_skandha_estimateUserOperationGas_24); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_24); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: 
preVerificationGas, // without maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_25); + console.log(message.fail_skandha_estimateUserOperationGas_25); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_25); + console.log(message.vali_skandha_estimateUserOperationGas_25); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_25); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.invalid_hex, // invalid maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_26); + console.log(message.fail_skandha_estimateUserOperationGas_26); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_26); + console.log(message.vali_skandha_estimateUserOperationGas_26); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_26); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.incorrect_hex, // incorrect maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_27); + console.log(message.fail_skandha_estimateUserOperationGas_27); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_27); + console.log(message.vali_skandha_estimateUserOperationGas_27); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_27); + } + } + } + ); + + it( + 'REGRESSION: Validate the 
eth_estimateUserOperationGas endpoint of the skandha without maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, // without maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_28); + console.log(message.fail_skandha_estimateUserOperationGas_28); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_28); + console.log(message.vali_skandha_estimateUserOperationGas_28); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_28); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.invalid_hex, // invalid paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_29); + console.log(message.fail_skandha_estimateUserOperationGas_29); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_29); + console.log(message.vali_skandha_estimateUserOperationGas_29); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_29); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.incorrect_hex, // incorrect paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_30); + 
console.log(message.fail_skandha_estimateUserOperationGas_30); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_30); + console.log(message.vali_skandha_estimateUserOperationGas_30); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_30); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, // without paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_31); + console.log(message.fail_skandha_estimateUserOperationGas_31); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_31); + console.log(message.vali_skandha_estimateUserOperationGas_31); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_31); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.invalid_hex, // invalid signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_32); + console.log(message.fail_skandha_estimateUserOperationGas_32); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.skandha_error_2)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_32); + console.log(message.vali_skandha_estimateUserOperationGas_32); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_32); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + 
sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.incorrect_hex, // incorrect signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_33); + console.log(message.fail_skandha_estimateUserOperationGas_33); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_33); + console.log(message.vali_skandha_estimateUserOperationGas_33); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_33); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, // without signature + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_34); + console.log(message.fail_skandha_estimateUserOperationGas_34); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_34); + console.log(message.vali_skandha_estimateUserOperationGas_34); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_34); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.invalidEntryPointAddress, // invalid entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_35); + console.log(message.fail_skandha_estimateUserOperationGas_35); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_35); + console.log(message.vali_skandha_estimateUserOperationGas_35); + } else { + console.error(e); + 
const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_35); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.incorrectentryPointAddress, // incorrect entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_36); + console.log(message.fail_skandha_estimateUserOperationGas_36); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_36); + console.log(message.vali_skandha_estimateUserOperationGas_36); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_36); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + ], // without entry point address + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_37); + console.log(message.fail_skandha_estimateUserOperationGas_37); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_37); + console.log(message.vali_skandha_estimateUserOperationGas_37); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_37); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'Eth_EstimateUserOperationGas', // invalid method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + 
paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_38); + console.log(message.fail_skandha_estimateUserOperationGas_38); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_38); + console.log(message.vali_skandha_estimateUserOperationGas_38); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_38); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperati', // incorrect method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_39); + console.log(message.fail_skandha_estimateUserOperationGas_39); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_39); + console.log(message.vali_skandha_estimateUserOperationGas_39); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_39); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: '', // without method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_40); + console.log(message.fail_skandha_estimateUserOperationGas_40); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_40); + console.log(message.vali_skandha_estimateUserOperationGas_40); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_40); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js b/test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js new file mode 100644 
index 0000000..528c824 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js @@ -0,0 +1,507 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import helper from '../../../../utils/helper.js'; + +let mainnetPrimeSdk; +let uoHash; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the get userOperation by hash endpoint of the skandha', function () { + it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + //wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + try { + uoHash = await mainnetPrimeSdk.send(op); + + console.log('UserOp hash:', uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + } + ); + + it( + 'SMOKE: Validate the eth_getUserOperationByHash endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + 
`https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [uoHash], + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_getUserOperationByHash_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.sender, + message.vali_skandha_getUserOperationByHash_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.nonce, + message.vali_skandha_getUserOperationByHash_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.initCode, + message.vali_skandha_getUserOperationByHash_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.callData, + message.vali_skandha_getUserOperationByHash_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.callGasLimit, + message.vali_skandha_getUserOperationByHash_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.verificationGasLimit, + message.vali_skandha_getUserOperationByHash_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.preVerificationGas, + message.vali_skandha_getUserOperationByHash_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.maxFeePerGas, + message.vali_skandha_getUserOperationByHash_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.maxPriorityFeePerGas, + message.vali_skandha_getUserOperationByHash_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.paymasterAndData, + message.vali_skandha_getUserOperationByHash_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.signature, + message.vali_skandha_getUserOperationByHash_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.entryPoint, + message.vali_skandha_getUserOperationByHash_entryPoint + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash 
endpoint of the skandha with invalid hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [data.invalid_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_2); + console.log(message.fail_skandha_getUserOperationByHash_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_2); + console.log(message.vali_skandha_getUserOperationByHash_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha with incorrect hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [data.incorrect_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_3); + console.log(message.fail_skandha_getUserOperationByHash_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_3); + console.log(message.vali_skandha_getUserOperationByHash_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha withOUT hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_4); + console.log(message.fail_skandha_getUserOperationByHash_4); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_4); + console.log(message.vali_skandha_getUserOperationByHash_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'Eth_GetUserOperationByHash', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_6); + console.log(message.fail_skandha_getUserOperationByHash_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_6); + console.log(message.vali_skandha_getUserOperationByHash_6); + } else { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperatio', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_6); + console.log(message.fail_skandha_getUserOperationByHash_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_6); + console.log(message.vali_skandha_getUserOperationByHash_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: '', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_7); + console.log(message.fail_skandha_getUserOperationByHash_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_7); + console.log(message.vali_skandha_getUserOperationByHash_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_7); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js b/test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js new file mode 100644 index 0000000..c8fa766 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js @@ -0,0 +1,600 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import helper from '../../../../utils/helper.js'; + +let mainnetPrimeSdk; +let uoHash; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the get userOperation receipt endpoint of the skandha', function () { + it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializating sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + 
Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + //wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await mainnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and sending to the bundler + try { + uoHash = await mainnetPrimeSdk.send(op); + + console.log('UserOp hash:', uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await mainnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + } + ); + + it( + 'SMOKE: Validate the eth_getUserOperationReceipt endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [uoHash], + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_getUserOperationReceipt_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOpHash, + message.vali_skandha_getUserOperationReceipt_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.sender, + message.vali_skandha_getUserOperationReceipt_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.nonce, + message.vali_skandha_getUserOperationReceipt_nonce + ); + } catch (e) 
{ + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.actualGasCost, + message.vali_skandha_getUserOperationReceipt_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.actualGasUsed, + message.vali_skandha_getUserOperationReceipt_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.to, + message.vali_skandha_getUserOperationReceipt_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.from, + message.vali_skandha_getUserOperationReceipt_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.transactionIndex, + message.vali_skandha_getUserOperationReceipt_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.gasUsed, + message.vali_skandha_getUserOperationReceipt_gasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.logsBloom, + message.vali_skandha_getUserOperationReceipt_logsBloom + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.blockHash, + message.vali_skandha_getUserOperationReceipt_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.transactionHash, + message.vali_skandha_getUserOperationReceipt_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.logs, + message.vali_skandha_getUserOperationReceipt_logs + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.blockNumber, + message.vali_skandha_getUserOperationReceipt_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.confirmations, + message.vali_skandha_getUserOperationReceipt_confirmations + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.cumulativeGasUsed, + message.vali_skandha_getUserOperationReceipt_cumulativeGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.effectiveGasPrice, + message.vali_skandha_getUserOperationReceipt_effectiveGasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.status, + message.vali_skandha_getUserOperationReceipt_status + ); + 
} catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.type, + message.vali_skandha_getUserOperationReceipt_type + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with invalid hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [data.invalid_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_2); + console.log(message.fail_skandha_getUserOperationReceipt_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_2); + console.log(message.vali_skandha_getUserOperationReceipt_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with incorrect hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [data.incorrect_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_3); + console.log(message.fail_skandha_getUserOperationReceipt_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_3); + console.log(message.vali_skandha_getUserOperationReceipt_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha withOUT hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_4); + console.log(message.fail_skandha_getUserOperationReceipt_4); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_4); + console.log(message.vali_skandha_getUserOperationReceipt_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( 
+ `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'Eth_GetUserOperationReceipt', // invalid method name + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_6); + console.log(message.fail_skandha_getUserOperationReceipt_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_6); + console.log(message.vali_skandha_getUserOperationReceipt_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperatio', // incorrect method name + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_6); + console.log(message.fail_skandha_getUserOperationReceipt_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_6); + console.log(message.vali_skandha_getUserOperationReceipt_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: '', // without method name + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_7); + console.log(message.fail_skandha_getUserOperationReceipt_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_7); + console.log(message.vali_skandha_getUserOperationReceipt_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_7); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js b/test/specs/mainnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js new file mode 100644 index 0000000..aee1ed5 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js @@ -0,0 +1,2195 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import helper from '../../../../utils/helper.js'; + +let mainnetPrimeSdk; +let sender;
+let nonce; +let initCode; +let callData; +let callGasLimit; +let verificationGasLimit; +let maxFeePerGas; +let maxPriorityFeePerGas; +let paymasterAndData; +let preVerificationGas; +let signature; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the send user operation endpoint of the skandha', function () { + it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializing sdk + try { + mainnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + //wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await mainnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await mainnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await mainnetPrimeSdk.estimate(); + + sender = op.sender; + nonce = op.nonce; + initCode = op.initCode; + callData = op.callData; + callGasLimit = op.callGasLimit; + verificationGasLimit = op.verificationGasLimit; + maxFeePerGas = op.maxFeePerGas; + maxPriorityFeePerGas = op.maxPriorityFeePerGas; + paymasterAndData = op.paymasterAndData; + preVerificationGas = op.preVerificationGas; + signature = op.signature; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + } + ); + + it( + 'SMOKE: Validate the eth_sendUserOperation endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, // sender captured from the estimated UserOp + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_sendUserOperation_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result, + message.vali_skandha_sendUserOperation_result + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: data.invalidSender, // invalid sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_2); + console.log(message.fail_skandha_sendUserOperation_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_2); + console.log(message.vali_skandha_sendUserOperation_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: data.incorrectSender, // incorrect sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_3); + console.log(message.fail_skandha_sendUserOperation_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_3); + console.log(message.vali_skandha_sendUserOperation_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + nonce: nonce, // without sender + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, 
message.fail_skandha_sendUserOperation_3); + console.log(message.fail_skandha_sendUserOperation_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_3); + console.log(message.vali_skandha_sendUserOperation_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: data.invalid_hex, // invalid nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_5); + console.log(message.fail_skandha_sendUserOperation_5); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_5); + console.log(message.vali_skandha_sendUserOperation_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: data.incorrect_hex, // incorrect nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_6); + console.log(message.fail_skandha_sendUserOperation_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_6); + console.log(message.vali_skandha_sendUserOperation_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, // without nonce + initCode: initCode, + callData: callData, + 
callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_7); + console.log(message.fail_skandha_sendUserOperation_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_7); + console.log(message.vali_skandha_sendUserOperation_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.invalid_hex, // invalid initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_8); + console.log(message.fail_skandha_sendUserOperation_8); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_8); + console.log(message.vali_skandha_sendUserOperation_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.incorrect_hex, // incorrect initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_9); + console.log(message.fail_skandha_sendUserOperation_9); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_9); + console.log(message.vali_skandha_sendUserOperation_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without initCode on 
the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, // without initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_10); + console.log(message.fail_skandha_sendUserOperation_10); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_10); + console.log(message.vali_skandha_sendUserOperation_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.invalid_hex, // invalid callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_11); + console.log(message.fail_skandha_sendUserOperation_11); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_11); + console.log(message.vali_skandha_sendUserOperation_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.incorrect_hex, // incorrect callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_12); + console.log(message.fail_skandha_sendUserOperation_12); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, 
message.vali_skandha_sendUserOperation_12); + console.log(message.vali_skandha_sendUserOperation_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, // without callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_13); + console.log(message.fail_skandha_sendUserOperation_13); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_13); + console.log(message.vali_skandha_sendUserOperation_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_13); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.invalid_hex, // invalid callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_14); + console.log(message.fail_skandha_sendUserOperation_14); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_14); + console.log(message.vali_skandha_sendUserOperation_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_14); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.incorrect_hex, // incorrect callGasLiit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: 
paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_15); + console.log(message.fail_skandha_sendUserOperation_15); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_15); + console.log(message.vali_skandha_sendUserOperation_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_15); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, // without callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_16); + console.log(message.fail_skandha_sendUserOperation_16); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_16); + console.log(message.vali_skandha_sendUserOperation_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_16); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.invalid_hex, // invalid verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_17); + console.log(message.fail_skandha_sendUserOperation_17); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_17); + console.log(message.vali_skandha_sendUserOperation_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_17); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + 
method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.incorrect_hex, // incorrect verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_18); + console.log(message.fail_skandha_sendUserOperation_18); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_18); + console.log(message.vali_skandha_sendUserOperation_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_18); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, // without verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_19); + console.log(message.fail_skandha_sendUserOperation_19); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_19); + console.log(message.vali_skandha_sendUserOperation_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_19); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.invalid_hex, // invalid preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_20); + console.log(message.fail_skandha_sendUserOperation_20); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_20); + console.log(message.vali_skandha_sendUserOperation_20); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_skandha_sendUserOperation_20); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.incorrect_hex, // incorrect preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_21); + console.log(message.fail_skandha_sendUserOperation_21); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_21); + console.log(message.vali_skandha_sendUserOperation_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_21); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, // without preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_22); + console.log(message.fail_skandha_sendUserOperation_22); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_22); + console.log(message.vali_skandha_sendUserOperation_22); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_22); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.invalid_hex, // invalid maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_23); + 
console.log(message.fail_skandha_sendUserOperation_23); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_23); + console.log(message.vali_skandha_sendUserOperation_23); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_23); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.incorrect_hex, // incorrect maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_24); + console.log(message.fail_skandha_sendUserOperation_24); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_24); + console.log(message.vali_skandha_sendUserOperation_24); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_24); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, // without maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_25); + console.log(message.fail_skandha_sendUserOperation_25); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_25); + console.log(message.vali_skandha_sendUserOperation_25); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_25); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + 
verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.invalid_hex, // invalid maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_26); + console.log(message.fail_skandha_sendUserOperation_26); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_26); + console.log(message.vali_skandha_sendUserOperation_26); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_26); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.incorrect_hex, // incorrect maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_27); + console.log(message.fail_skandha_sendUserOperation_27); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_27); + console.log(message.vali_skandha_sendUserOperation_27); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_27); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, // without maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_28); + console.log(message.fail_skandha_sendUserOperation_28); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_28); + console.log(message.vali_skandha_sendUserOperation_28); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_28); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid paymasterAndData on the ' 
+ + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.invalid_hex, // invalid paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_29); + console.log(message.fail_skandha_sendUserOperation_29); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_29); + console.log(message.vali_skandha_sendUserOperation_29); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_29); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.incorrect_hex, // incorrect paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_30); + console.log(message.fail_skandha_sendUserOperation_30); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_30); + console.log(message.vali_skandha_sendUserOperation_30); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_30); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, // without paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_31); + console.log(message.fail_skandha_sendUserOperation_31); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, 
message.vali_skandha_sendUserOperation_31); + console.log(message.vali_skandha_sendUserOperation_31); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_31); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.invalid_hex, // invalid signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_32); + console.log(message.fail_skandha_sendUserOperation_32); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_32); + console.log(message.vali_skandha_sendUserOperation_32); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_32); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.incorrect_hex, // incorrect signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_33); + console.log(message.fail_skandha_sendUserOperation_33); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_33); + console.log(message.vali_skandha_sendUserOperation_33); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_33); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: 
paymasterAndData, // without signature + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_34); + console.log(message.fail_skandha_sendUserOperation_34); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_34); + console.log(message.vali_skandha_sendUserOperation_34); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_34); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.invalidEntryPointAddress, // invalid entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_35); + console.log(message.fail_skandha_sendUserOperation_35); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_35); + console.log(message.vali_skandha_sendUserOperation_35); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_35); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.incorrectentryPointAddress, // incorrect entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_36); + console.log(message.fail_skandha_sendUserOperation_36); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_36); + console.log(message.vali_skandha_sendUserOperation_36); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_36); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + 
`https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + ], // without entry point address + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_37); + console.log(message.fail_skandha_sendUserOperation_37); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_37); + console.log(message.vali_skandha_sendUserOperation_37); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_37); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'Eth_SendUserOperation', // invalid method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_38); + console.log(message.fail_skandha_sendUserOperation_38); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_38); + console.log(message.vali_skandha_sendUserOperation_38); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_38); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOper', // incorrect method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_39); + console.log(message.fail_skandha_sendUserOperation_39); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_39); + console.log(message.vali_skandha_sendUserOperation_39); + } else { + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_39); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: '', // without method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_40); + console.log(message.fail_skandha_sendUserOperation_40); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_40); + console.log(message.vali_skandha_sendUserOperation_40); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_40); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/skandha/skandha_config.spec.js b/test/specs/mainnet/paymasterAPIs/skandha/skandha_config.spec.js new file mode 100644 index 0000000..f5a100b --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/skandha/skandha_config.spec.js @@ -0,0 +1,199 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the config endpoint of the skandha', function () { + it( + 'SMOKE: Validate the skandha_config endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_config', + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber(response.data.id, message.vali_skandha_config_id); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.flags, + message.vali_skandha_config_flags + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.entryPoints, + message.vali_skandha_config_entryPoints + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.beneficiary, + message.vali_skandha_config_beneficiary + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.relayers, + 
message.vali_skandha_config_relayers + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_1); + } + } + ); + + it( + 'REGRESSION: Validate the skandha_config endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'Skandha_Config', // invalid method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_config_2); + console.log(message.fail_skandha_config_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_config_2); + console.log(message.vali_skandha_config_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_config endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_configuration', // incorrect method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_config_3); + console.log(message.fail_skandha_config_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_config_3); + console.log(message.vali_skandha_config_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_config endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: '', // without method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_config_3); + console.log(message.fail_skandha_config_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_config_3); + console.log(message.vali_skandha_config_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_3); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js b/test/specs/mainnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js new file mode 100644 index 0000000..0b3d900 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js @@ -0,0 +1,301 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from 
'../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the fee history endpoint of the skandha', function () { + it( + 'SMOKE: Validate the skandha_feeHistory endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + // Add assertions + try { + assert.isNumber(response.data.id, message.vali_skandha_feeHistory_id); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.actualGasPrice, + message.vali_skandha_feeHistory_actualGasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxFeePerGas, + message.vali_skandha_feeHistory_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxPriorityFeePerGas, + message.vali_skandha_feeHistory_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_1); + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'Skandha_FeeHistory', // invalid method name + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_2); + console.log(message.fail_skandha_feeHistory_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_2); + console.log(message.vali_skandha_feeHistory_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_getGas', // incorrect method name + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_3); + console.log(message.fail_skandha_feeHistory_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_3); + console.log(message.vali_skandha_feeHistory_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_3); + } + } + } + ); + + it( + 'REGRESSION: 
Validate the skandha_feeHistory endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: '', // without method name + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_3); + console.log(message.fail_skandha_feeHistory_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_3); + console.log(message.vali_skandha_feeHistory_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with invalid entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: [data.invalidEntryPointAddress, '10', 'latest'], // invalid entry point address + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_5); + console.log(message.fail_skandha_feeHistory_5); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_5); + console.log(message.vali_skandha_feeHistory_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with incorrect entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: [data.incorrectentryPointAddress, '10', 'latest'], // incorrect entry point address + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_6); + console.log(message.fail_skandha_feeHistory_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_6); + console.log(message.vali_skandha_feeHistory_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha without entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: ['10', 'latest'], // without entry point address + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_7); + console.log(message.fail_skandha_feeHistory_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_7); + console.log(message.vali_skandha_feeHistory_7); + } else { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_7); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js b/test/specs/mainnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js new file mode 100644 index 0000000..cd868d6 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js @@ -0,0 +1,180 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the get gas price endpoint of the skandha', function () { + it( + 'SMOKE: Validate the skandha_getGasPrice endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_getGasPrice', + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_getGasPrice_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxPriorityFeePerGas, + message.vali_skandha_getGasPrice_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxFeePerGas, + message.vali_skandha_getGasPrice_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_1); + } + } + ); + + it( + 'REGRESSION: Validate the skandha_getGasPrice endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'Skandha_GetGasPrice', // invalid method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_getGasPrice_2); + console.log(message.fail_skandha_getGasPrice_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getGasPrice_2); + console.log(message.vali_skandha_getGasPrice_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_getGasPrice endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_getGas', // incorrect method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_getGasPrice_3); + 
console.log(message.fail_skandha_getGasPrice_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getGasPrice_3); + console.log(message.vali_skandha_getGasPrice_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_getGasPrice endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: '', // without method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_getGasPrice_3); + console.log(message.fail_skandha_getGasPrice_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getGasPrice_3); + console.log(message.vali_skandha_getGasPrice_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_3); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js new file mode 100644 index 0000000..f2de401 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js @@ -0,0 +1,878 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the add sponsorship policy api of Arka', function () { + it( + 'SMOKE: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + 
perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${`${data.arka_fqdn}/${data.arka_addPolicy}`}`, + requestData, + { + headers, + } + ); + + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.invalid_sponsorAddress, // invalid wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_2); + assert.fail(message.vali_addPolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_2, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.incorrect_sponsorAddress, // incorrect wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_3); + assert.fail(message.vali_addPolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_3, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function 
() { + var test = this; + try { + // define the payload + const requestData = { + name: randomName, // without wallet address + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_4); + assert.fail(message.vali_addPolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_4, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with wallet address as an empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: '', // empty string + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_5); + assert.fail(message.vali_addPolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with wallet address as only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: ' ', // blank spaces + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_6); + assert.fail(message.vali_addPolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_6, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without name on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + description: randomDescription, // without name + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_7); + assert.fail(message.vali_addPolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with name as an empty string on the ' + + randomChainName + + '
network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: '', // empty string + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_8); + assert.fail(message.vali_addPolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with name as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: ' ', // blank spaces + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_9); + assert.fail(message.vali_addPolicy_9); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without description on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, // without description + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_10); + assert.fail(message.vali_addPolicy_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_3, + message.fail_addPolicy_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with description as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: '', // empty string + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_11); + assert.fail(message.vali_addPolicy_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_3, + message.fail_addPolicy_11, + 400 + ); + } + } + ); + + 
it( + 'REGRESSION: Validate the add policy endpoint of Arka with description as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: ' ', // blank spaces + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_12); + assert.fail(message.vali_addPolicy_12); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + isPerpetual: true, // without EPVersion + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_13); + assert.fail(message.vali_addPolicy_13); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_4, + message.fail_addPolicy_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with invalid EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EP_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_14); + assert.fail(message.vali_addPolicy_14); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_5, + message.fail_addPolicy_14, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with EPVersion empty array on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: [], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_15); + assert.fail(message.vali_addPolicy_15); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status 
code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_5, + message.fail_addPolicy_15, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with invalid apikey headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers: invalid_headers, + } + ); + + addContext(test, message.vali_addPolicy_16); + assert.fail(message.vali_addPolicy_16); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_addPolicy_16, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with incorrect apikey headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers: incorrect_headers, + } + ); + + addContext(test, message.vali_addPolicy_17); + assert.fail(message.vali_addPolicy_17); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_addPolicy_17, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request without apikey headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers: withoutapikey_headers, + } + ); + + addContext(test, message.vali_addPolicy_18); + assert.fail(message.vali_addPolicy_18); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_addPolicy_18, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js new file mode 100644 index 0000000..5182b07 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js @@ -0,0 +1,418 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { 
randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the delete sponsorship policy api of Arka', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${data.invalid_newId}`, + { + headers, + } + ); + + addContext(test, 
message.vali_deletePolicy_2); + assert.fail(message.vali_deletePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${data.incorrect_newId}`, + { + headers, + } + ); + + addContext(test, message.vali_deletePolicy_3); + assert.fail(message.vali_deletePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete(`${data.arka_deletePolicy}`, { + headers, + }); + + addContext(test, message.vali_deletePolicy_4); + assert.fail(message.vali_deletePolicy_4); + } catch (e) { + let error = e.message; + if (error.includes(constant.sponsorshipPolicy_walletAddress_12)) { + addContext(test, message.vali_deletePolicy_1); + console.log(message.vali_deletePolicy_1); + } else { + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deletePolicy_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with zero value id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${data.zero_newId}`, + { + headers, + } + ); + + addContext(test, message.vali_deletePolicy_5); + assert.fail(message.vali_deletePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/-${newId}`, + { + headers, + } + ); + + addContext(test, message.vali_deletePolicy_6); + assert.fail(message.vali_deletePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_deletePolicy_7); + assert.fail(message.vali_deletePolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_deletePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + 
await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + headers: incorrect_headers, + } + ); + + addContext(test, message.vali_deletePolicy_8); + assert.fail(message.vali_deletePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_deletePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request without apikey headers + await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + headers: withoutapikey_headers, + } + ); + + addContext(test, message.vali_deletePolicy_9); + assert.fail(message.vali_deletePolicy_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_deletePolicy_9, + 400 + ); + } + } + ); + + it( + 'SMOKE: Validate the delete policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.message, + `Successfully deleted policy with id ${newId}`, + message.vali_deletePolicy_message + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deletePolicy_1); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js new file mode 100644 index 0000000..915e121 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js @@ -0,0 +1,457 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the disable sponsorship policy api of Arka', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of 
Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${data.invalid_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_2); + assert.fail(message.vali_disablePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${data.incorrect_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_3); + assert.fail(message.vali_disablePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + 
`${data.arka_fqdn}/${data.arka_disablePolicy}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_4); + assert.fail(message.vali_disablePolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_disablePolicy_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with zero id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${data.zero_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_5); + assert.fail(message.vali_disablePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/-${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_6); + assert.fail(message.vali_disablePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_disablePolicy_7); + assert.fail(message.vali_disablePolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_disablePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_disablePolicy_8); + assert.fail(message.vali_disablePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_disablePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_disablePolicy_9); + assert.fail(message.vali_disablePolicy_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_disablePolicy_9, + 400 + ); + } + } + ); + + it( + 'SMOKE: Validate the disable policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( 
+ `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.message, + `Successfully disabled policy with id ${newId}`, + message.vali_disablePolicy_message + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_disablePolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with already disabled policy on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_10); + assert.fail(message.vali_disablePolicy_10); + } catch (e) { + let error = e.response.data.error; + if (error.includes(constant.sponsorshipPolicy_walletAddress_14)) { + // validate the HTTP status code is 500 + expect(e.response.status).to.equal(500); + } else { + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_disablePolicy_10); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js new file mode 100644 index 0000000..f7dfcd9 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js @@ -0,0 +1,457 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the enable sponsorship policy api of Arka', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: 
['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${data.invalid_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_2); + assert.fail(message.vali_enablePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${data.incorrect_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_3); + assert.fail(message.vali_enablePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_4); + assert.fail(message.vali_enablePolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_enablePolicy_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy 
endpoint of Arka with zero id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${data.zero_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_5); + assert.fail(message.vali_enablePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/-${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_6); + assert.fail(message.vali_enablePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with invalid apikey headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + null, + { + headers: invalid_headers, + } + ); + + addContext(test, message.vali_enablePolicy_7); + assert.fail(message.vali_enablePolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_enablePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with incorrect apikey headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + null, + { + headers: incorrect_headers, + } + ); + + addContext(test, message.vali_enablePolicy_8); + assert.fail(message.vali_enablePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_enablePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request without apikey headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + null, + { + headers: withoutapikey_headers, + } + ); + + addContext(test, message.vali_enablePolicy_9); + assert.fail(message.vali_enablePolicy_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_enablePolicy_9, + 400 + ); + } + } + ); + + it( + 'SMOKE: Validate the enable policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.message, + `Successfully enabled policy with id ${newId}`, + message.vali_enablePolicy_message + ); + } catch (e) { + addContext(test, e); + console.error(e); + const 
eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_enablePolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with already enabled policy on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_10); + assert.fail(message.vali_enablePolicy_10); + } catch (e) { + let error = e.response.data.error; + if (error.includes(constant.sponsorshipPolicy_walletAddress_15)) { + // validate the HTTP status code is 500 + expect(e.response.status).to.equal(500); + } else { + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_enablePolicy_10); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js new file mode 100644 index 0000000..5ed6d66 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js @@ -0,0 +1,288 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + responseBody.walletAddress, + message.vali_latestPolicyWalletAddress_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddress_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, 
eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddress_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddress_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_latestPolicyWalletAddress_1); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_2); + assert.fail(message.vali_latestPolicyWalletAddress_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddress_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_3); + assert.fail(message.vali_latestPolicyWalletAddress_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddress_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_4); + assert.fail(message.vali_latestPolicyWalletAddress_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddress_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_5); + assert.fail(message.vali_latestPolicyWalletAddress_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddress_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try 
{ + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_6); + assert.fail(message.vali_latestPolicyWalletAddress_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddress_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_7); + assert.fail(message.vali_latestPolicyWalletAddress_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddress_7, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js new file mode 100644 index 0000000..8ca1a80 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js @@ -0,0 +1,413 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address and chainid', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address and chain id endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + responseBody.walletAddress, + message.vali_latestPolicyWalletAddressandChainid_walletAddress + ); + } catch (e) { + addContext(test, e); + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddressandChainid_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddressandChainid_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddressandChainid_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_latestPolicyWalletAddressandChainid_1); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_2); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_3); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_4); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressandChainid_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + 
`${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomInvalidChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_5); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with incorrect chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomIncorrectChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_6); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_7); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressandChainid_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without wallet address and chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_8); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_9); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandChainid_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' 
network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_10); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandChainid_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_11); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandChainid_11, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js new file mode 100644 index 0000000..54ae956 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js @@ -0,0 +1,408 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address and ep version', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + 
responseBody.walletAddress, + message.vali_latestPolicyWalletAddressandEPVersion_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddressandEPVersion_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddressandEPVersion_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddressandEPVersion_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_latestPolicyWalletAddressandEPVersion_1); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_2); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandEPVersion_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_3); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandEPVersion_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_4); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressandEPVersion_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with invalid entry point 
version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_5); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressandEPVersion_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_6); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressandEPVersion_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_7); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressandEPVersion_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_8); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandEPVersion_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_9); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_9); + } catch (e) { + handleErrorValidation( + test, + e, + 
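+          // handleErrorValidation (utils/baseTest.js) presumably compares the caught
+          // axios error against the expected error text, records the failure message
+          // in the report, and asserts the expected HTTP status passed below (400)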
constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandEPVersion_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_10); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandEPVersion_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_11); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandEPVersion_11, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js new file mode 100644 index 0000000..b647ab2 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js @@ -0,0 +1,570 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address, ep version, and chainid', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka on the ' + + 
randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + responseBody.walletAddress, + message.vali_latestPolicyWalletAddressEPVersionandChainid_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddressEPVersionandChainid_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddressEPVersionandChainid_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddressEPVersionandChainid_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail( + message.fail_latestPolicyWalletAddressEPVersionandChainid_1 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_2 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_2 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_2, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_3 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_3 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_3, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; 
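+      // with the wallet address segment omitted, the remaining path segments shift
+      // left, so the API is expected to treat the request as referencing an unknown
+      // wallet and reject it with a 404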
+ try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_4 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_4 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressEPVersionandChainid_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_5 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_5 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressEPVersionandChainid_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_6 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_6 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressEPVersionandChainid_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_7 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_7 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressEPVersionandChainid_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomInvalidChainId}/latest`, + null, + { + 
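+            // randomInvalidChainId comes from utils/sharedData_mainnet.js and
+            // presumably holds a malformed chain id, so this lookup is expected
+            // to fail with a 404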
headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_8 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_8 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomIncorrectChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_9 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_9 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_9, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_10 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_10 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.invalid_data, + message.fail_latestPolicyWalletAddressEPVersionandChainid_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_11 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_11 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_11, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + { + invalid_headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_12 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_12 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + 
message.fail_latestPolicyWalletAddressEPVersionandChainid_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + { + incorrect_headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_13 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_13 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressEPVersionandChainid_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + { + withoutapikey_headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_14 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_14 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressEPVersionandChainid_14, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js new file mode 100644 index 0000000..75ee457 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js @@ -0,0 +1,185 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate records of the sponsorship policy api', function () { + it( + 'SMOKE: Validate the policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policy}`, + null, + { + 
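+          // NOTE: axios.get(url, config) accepts only two arguments, so with `null`
+          // occupying the config slot this object (and its apikey header) is likely
+          // ignored by axios and the request may be sent without the apikey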
headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty(responseBody[i].name, message.vali_policy_name); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody[i].id, message.vali_policy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policy_1); + } + } + ); + + it( + 'REGRESSION: Validate the policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, { + invalid_headers, + }); + + addContext(test, message.vali_policy_2); + assert.fail(message.vali_policy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policy_2, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, { + incorrect_headers, + }); + + addContext(test, message.vali_policy_3); + assert.fail(message.vali_policy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policy_3, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, { + withoutapikey_headers, + }); + + addContext(test, message.vali_policy_4); + assert.fail(message.vali_policy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policy_4, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js new file mode 100644 index 0000000..ba45524 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js @@ -0,0 +1,448 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import 
message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy api using id', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'SMOKE: Fetching the policy of particular id endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${newId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + 
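+            // the fetched policy is expected to echo the values created in the
+            // PRECONDITION test above: sponsorAddress, randomName, randomDescription
+            // and the id captured in newId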
responseBody.walletAddress, + data.sponsorAddress, + message.vali_policyId_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_policyId_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_policyId_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal(responseBody.id, newId, message.vali_policyId_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyId_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${data.invalid_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_2); + assert.fail(message.vali_policyId_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${data.incorrect_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_3); + assert.fail(message.vali_policyId_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, null, { + headers, + }); + + addContext(test, message.vali_policyId_4); + assert.fail(message.vali_policyId_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with zero id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${data.zero_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_5); + assert.fail(message.vali_policyId_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + 
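+          // the negative id is built by prefixing '-' to the id created in the
+          // precondition test; the endpoint is expected to reject it with a 404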
`${data.arka_fqdn}/${data.arka_policy}/-${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_6); + assert.fail(message.vali_policyId_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}/${newId}`, { + invalid_headers, + }); + + addContext(test, message.vali_policyId_7); + assert.fail(message.vali_policyId_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyId_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}/${newId}`, { + incorrect_headers, + }); + + addContext(test, message.vali_policyId_8); + assert.fail(message.vali_policyId_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyId_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}/${newId}`, { + withoutapikey_headers, + }); + + addContext(test, message.vali_policyId_9); + assert.fail(message.vali_policyId_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyId_9, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js new file mode 100644 index 0000000..25c1f96 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js @@ -0,0 +1,286 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy 
api using walet address', function () { + it( + 'SMOKE: Fetching the policy of particular wallet address endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policyWalletAddress_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].name, + message.vali_policyWalletAddress_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policyWalletAddress_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody[i].id, + message.vali_policyWalletAddress_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyWalletAddress_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_2); + assert.fail(message.vali_policyWalletAddress_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddress_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_3); + assert.fail(message.vali_policyWalletAddress_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddress_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_policyWalletAddress}`, null, { + headers, + }); + + addContext(test, message.vali_policyWalletAddress_4); + assert.fail(message.vali_policyWalletAddress_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_policyWalletAddress_4, + 400 + ); + } + } + ); + + it( + 
'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_5); + assert.fail(message.vali_policyWalletAddress_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddress_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_6); + assert.fail(message.vali_policyWalletAddress_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddress_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_7); + assert.fail(message.vali_policyWalletAddress_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddress_7, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js new file mode 100644 index 0000000..f9c4802 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js @@ -0,0 +1,406 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy api using walet address and ep version', function () { + it( + 
'SMOKE: Fetching the policy of particular wallet address and entry point version endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policyWalletAddressandEPVersion_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].name, + message.vali_policyWalletAddressandEPVersion_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policyWalletAddressandEPVersion_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody[i].id, + message.vali_policyWalletAddressandEPVersion_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyWalletAddressandEPVersion_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_2); + assert.fail(message.vali_policyWalletAddressandEPVersion_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressandEPVersion_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_3); + assert.fail(message.vali_policyWalletAddressandEPVersion_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressandEPVersion_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}`, + null, 
+ { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_4); + assert.fail(message.vali_policyWalletAddressandEPVersion_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressandEPVersion_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with invalid entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_5); + assert.fail(message.vali_policyWalletAddressandEPVersion_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressandEPVersion_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_6); + assert.fail(message.vali_policyWalletAddressandEPVersion_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressandEPVersion_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_7); + assert.fail(message.vali_policyWalletAddressandEPVersion_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressandEPVersion_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_policyWalletAddress}`, null, { + headers, + }); + + addContext(test, message.vali_policyWalletAddressandEPVersion_8); + assert.fail(message.vali_policyWalletAddressandEPVersion_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_policyWalletAddressandEPVersion_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + 
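+          // note: the shorthand `{ invalid_headers }` creates a config key named
+          // `invalid_headers` rather than `headers`, so this request is likely sent
+          // without any apikey header at all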
`${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_9); + assert.fail(message.vali_policyWalletAddressandEPVersion_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressandEPVersion_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_10); + assert.fail(message.vali_policyWalletAddressandEPVersion_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressandEPVersion_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_11); + assert.fail(message.vali_policyWalletAddressandEPVersion_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressandEPVersion_11, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js new file mode 100644 index 0000000..97347e1 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js @@ -0,0 +1,516 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the 
sponsorship policy api using walet address, ep version, and chainid', function () { + it( + 'SMOKE: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policyWalletAddressEPVersionandChainid_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].name, + message.vali_policyWalletAddressEPVersionandChainid_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policyWalletAddressEPVersionandChainid_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody[i].id, + message.vali_policyWalletAddressEPVersionandChainid_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyWalletAddressEPVersionandChainid_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_2); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressEPVersionandChainid_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_3); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressandEPVersion_11, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and 
chain id endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_4); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressEPVersionandChainid_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_5); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressEPVersionandChainid_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_6); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressEPVersionandChainid_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_7); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressEPVersionandChainid_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomInvalidChainId}`, + null, + { + headers, + } + ); + + addContext(test, 
message.vali_policyWalletAddressEPVersionandChainid_8); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressEPVersionandChainid_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomIncorrectChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_9); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressEPVersionandChainid_9, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_10 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressEPVersionandChainid_10, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_policyWalletAddress}`, null, { + headers, + }); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_11 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_policyWalletAddressEPVersionandChainid_11, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + { + invalid_headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_12 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_12); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressEPVersionandChainid_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect apikey on the ' + + randomChainName + 
+ ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + { + incorrect_headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_13 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_13); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressEPVersionandChainid_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + { + withoutapikey_headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_14 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_14); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressEPVersionandChainid_14, + 400 + ); + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js new file mode 100644 index 0000000..5564b9f --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js @@ -0,0 +1,1786 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); +const updatedRandomName = generateRandomString(15); +const updatedRandomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy api using walet address', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + 
isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'SMOKE: Validate the update policy endpoint of Arka on the ' + + randomChainName + + ' network: case 1', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + globalMaximumApplicable: false, + globalMaximumUsd: data.globalMaximumUsd, + globalMaximumNative: data.globalMaximumNative, + globalMaximumOpCount: data.globalMaximumOpCount, + perUserMaximumApplicable: false, + perUserMaximumUsd: data.perUserMaximumUsd, + perUserMaximumNative: data.perUserMaximumNative, + perUserMaximumOpCount: data.perUserMaximumOpCount, + perOpMaximumApplicable: false, + perOpMaximumUsd: data.perOpMaximumUsd, + perOpMaximumNative: data.perOpMaximumUsd, + }; + + // send POST request with headers and data + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal(responseBody.id, newId, message.vali_updatePolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_updatePolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); 
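+          // NOTE (clarification, not part of the original spec): every field check in
+          // these specs is wrapped in its own try/catch with addContext so that one
+          // failing assertion is still reported in the mochawesome output without
+          // aborting the remaining checks. A hypothetical helper could remove the
+          // repetition, e.g.:
+          //
+          //   // sketch only -- `softAssert` is not an existing util in this repo
+          //   const softAssert = (test, fn) => {
+          //     try {
+          //       fn();
+          //     } catch (e) {
+          //       console.error(e);
+          //       addContext(test, e.toString());
+          //     }
+          //   };
+          //   softAssert(test, () =>
+          //     assert.equal(responseBody.walletAddress, data.sponsorAddress)
+          //   );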
+ } + + try { + assert.equal( + responseBody.name, + updatedRandomName, + message.vali_updatePolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + updatedRandomDescription, + message.vali_updatePolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // ********************** + + try { + assert.isTrue( + responseBody.isPublic, + message.vali_updatePolicy_isPublic + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.isEnabled, + message.vali_updatePolicy_isEnabled + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isApplicableToAllNetworks, + message.vali_updatePolicy_isApplicableToAllNetworks + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.enabledChains[0], + randomChainId, + message.vali_updatePolicy_enabledChains + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.supportedEPVersions[0], + data.ep07, + message.vali_updatePolicy_supportedEPVersions + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isPerpetual, + message.vali_updatePolicy_isPerpetual + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.globalMaximumApplicable, + message.vali_updatePolicy_globalMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.globalMaximumUsd, + message.vali_updatePolicy_globalMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.globalMaximumNative, + message.vali_updatePolicy_globalMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.globalMaximumOpCount, + message.vali_updatePolicy_globalMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.perUserMaximumApplicable, + message.vali_updatePolicy_perUserMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perUserMaximumUsd, + message.vali_updatePolicy_perUserMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perUserMaximumNative, + message.vali_updatePolicy_perUserMaximumNative + ); + } catch (e) { + addContext(test, e); + 
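+          // NOTE (clarification, not part of the original spec): case 1 sends numeric
+          // limit values but switches the *Applicable flags off (globalMaximumApplicable,
+          // perUserMaximumApplicable and perOpMaximumApplicable are all false), so these
+          // assertions expect the service to persist the corresponding limits as null.
+          // That is why assert.isNull is used here even though the request payload
+          // carried data.globalMaximumUsd, data.perUserMaximumNative, and so on.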
console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perUserMaximumOpCount, + message.vali_updatePolicy_perUserMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.perOpMaximumApplicable, + message.vali_updatePolicy_perOpMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perOpMaximumUsd, + message.vali_updatePolicy_perOpMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perOpMaximumNative, + message.vali_updatePolicy_perOpMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressAllowList, + message.vali_updatePolicy_addressAllowList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressBlockList, + message.vali_updatePolicy_addressBlockList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_updatePolicy_1); + } + } + ); + + it( + 'SMOKE: Validate the update policy endpoint of Arka on the ' + + randomChainName + + ' network: case 2', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: data.globalMaximumUsd, + globalMaximumNative: data.globalMaximumNative, + globalMaximumOpCount: data.globalMaximumOpCount, + perUserMaximumApplicable: true, + perUserMaximumUsd: data.perUserMaximumUsd, + perUserMaximumNative: data.perUserMaximumNative, + perUserMaximumOpCount: data.perUserMaximumOpCount, + perOpMaximumApplicable: true, + perOpMaximumUsd: data.perOpMaximumUsd, + perOpMaximumNative: data.perOpMaximumUsd, + }; + + // send POST request with headers and data + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal(responseBody.id, newId, message.vali_updatePolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_updatePolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + updatedRandomName, + message.vali_updatePolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + updatedRandomDescription, + message.vali_updatePolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isPublic, + message.vali_updatePolicy_isPublic + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.isEnabled, + message.vali_updatePolicy_isEnabled + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isApplicableToAllNetworks, + message.vali_updatePolicy_isApplicableToAllNetworks + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.enabledChains[0], + randomChainId, + message.vali_updatePolicy_enabledChains + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.supportedEPVersions[0], + data.ep07, + message.vali_updatePolicy_supportedEPVersions + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isPerpetual, + message.vali_updatePolicy_isPerpetual + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.globalMaximumApplicable, + message.vali_updatePolicy_globalMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.globalMaximumUsd, + data.globalMaximumUsd, + message.vali_updatePolicy_globalMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.globalMaximumNative, + data.globalMaximumNative, + message.vali_updatePolicy_globalMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.globalMaximumOpCount, + data.globalMaximumOpCount, + message.vali_updatePolicy_globalMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.perUserMaximumApplicable, + message.vali_updatePolicy_perUserMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perUserMaximumUsd, + data.perUserMaximumUsd, + message.vali_updatePolicy_perUserMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perUserMaximumNative, + data.perUserMaximumNative, + message.vali_updatePolicy_perUserMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + 
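+          // NOTE (clarification, not part of the original spec): in case 2 all three
+          // *Applicable flags are true, so the configured caps are expected to be echoed
+          // back and are compared against the values from testData.json. Be aware that
+          // the case 2 payload reuses data.perOpMaximumUsd for perOpMaximumNative, while
+          // the assertion further down compares against data.perOpMaximumNative, so those
+          // two test-data values presumably have to be equal for that check to pass.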
assert.equal( + responseBody.perUserMaximumOpCount, + data.perUserMaximumOpCount, + message.vali_updatePolicy_perUserMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.perOpMaximumApplicable, + message.vali_updatePolicy_perOpMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perOpMaximumUsd, + data.perOpMaximumUsd, + message.vali_updatePolicy_perOpMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perOpMaximumNative, + data.perOpMaximumNative, + message.vali_updatePolicy_perOpMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressAllowList, + message.vali_updatePolicy_addressAllowList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressBlockList, + message.vali_updatePolicy_addressBlockList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_updatePolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.invalid_sponsorAddress, // invalid wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_2); + assert.fail(message.vali_updatePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_updatePolicy_2, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.incorrect_sponsorAddress, // incorrect wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_3); + assert.fail(message.vali_updatePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_updatePolicy_3, + 403 + ); 
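+        // NOTE (clarification, not part of the original spec): these REGRESSION cases are
+        // negative tests -- if the PUT above unexpectedly resolves, the addContext/assert.fail
+        // pair marks the test as failed; otherwise the rejection is handed to
+        // handleErrorValidation, which (as used throughout these specs) is expected to
+        // compare the axios error against the given constant and HTTP status, roughly:
+        //
+        //   // sketch of the assumed shape, not the actual implementation in baseTest.js
+        //   expect(e.response.status).to.equal(403);
+        //   expect(e.response.data.error).to.include(constant.sponsorshipPolicy_walletAddress_1);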
+ } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + name: randomName, // without wallet address + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_4); + assert.fail(message.vali_updatePolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_updatePolicy_4, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with wallet address as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: '', // empty string + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_5); + assert.fail(message.vali_updatePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_updatePolicy_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with wallet address as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: ' ', // blank spaces + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_6); + assert.fail(message.vali_updatePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_updatePolicy_6, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without name on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + description: randomDescription, // without name + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_7); + assert.fail(message.vali_updatePolicy_7); + } catch (e) { 
+ handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with name as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: '', // empty string + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_8); + assert.fail(message.vali_updatePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with name as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: ' ', // blank spaces + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_9); + assert.fail(message.vali_updatePolicy_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without description on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, // without description + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_10); + assert.fail(message.vali_updatePolicy_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with description as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: '', // empty string + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + 
`${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_11); + assert.fail(message.vali_updatePolicy_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_11, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with description as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: ' ', // blank spaces + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_12); + assert.fail(message.vali_updatePolicy_12); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + isPerpetual: true, // without EPVersion + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_13); + assert.fail(message.vali_updatePolicy_13); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EP_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_14); + assert.fail(message.vali_updatePolicy_14); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_14, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with EPVersion empty array on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + 
isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: [], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_15); + assert.fail(message.vali_updatePolicy_15); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_15, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: data.invalid_newId, // invalid id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_16); + assert.fail(message.vali_updatePolicy_16); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_updatePolicy_16, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: data.incorrect_newId, // incorrect id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_17); + assert.fail(message.vali_updatePolicy_17); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_updatePolicy_17, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_18); + assert.fail(message.vali_updatePolicy_18); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_updatePolicy_18, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with zero value id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: 
data.zero_newId, // zero value id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_19); + assert.fail(message.vali_updatePolicy_19); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_updatePolicy_19, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with negative value id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: `-${newId}`, // negative id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_20); + assert.fail(message.vali_updatePolicy_20); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_updatePolicy_20, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + invalid_headers, + } + ); + + addContext(test, message.vali_updatePolicy_21); + assert.fail(message.vali_updatePolicy_21); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_21, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_updatePolicy_22); + assert.fail(message.vali_updatePolicy_22); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_22, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate 
the update policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_updatePolicy_23); + assert.fail(message.vali_updatePolicy_23); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_23, + 400 + ); + } + } + ); +}); diff --git a/test/specs/skandhaBenchmark/erc20Transaction/mumbai.spec.js b/test/specs/skandhaBenchmark/erc20Transaction/mumbai.spec.js deleted file mode 100644 index 4c3b976..0000000 --- a/test/specs/skandhaBenchmark/erc20Transaction/mumbai.spec.js +++ /dev/null @@ -1,150 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk } from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import addContext from 'mochawesome/addContext.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; - -const txCount = data.txCount; - -describe('Determine benchmarks of skandha with erc20 token transaction on the mumbai network', function () { - it('Perform the transfer ERC20 token on the mumbai network for determine benchmarks of skandha', async function () { - var test = this; - await customRetryAsync(async function () { - let mumbaiTestNetSdk; - try { - mumbaiTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.mumbai_chainid), - projectKey: process.env.PROJECT_KEY_TESTNET, - } - ); - } catch (e) { - console.error(e); - return; // Handle initialization error and skip test - } - - // clear the transaction batch - try { - await mumbaiTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The transaction of the batch is not clear correctly.'); - } - - let userOpsBatch = []; - for (let j = 0; j < txCount; j++) { - let transactionData; - // get the respective provider details - let provider; - try { - provider = new ethers.providers.JsonRpcProvider( - data.providerNetwork_mumbai - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The provider response is not displayed correctly.'); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_mumbaiUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The get erc20 Contract Interface is not performed.'); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The decimals from erc20 
contract is not displayed correctly.' - ); - } - - // get transferFrom encoded data - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The decimals from erc20 contract is not displayed correctly.' - ); - } - - // Add transaction data to the batch - try { - userOpsBatch.push( - await mumbaiTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_mumbaiUSDC, - data: transactionData, - }) - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'An error is displayed while Adding the transactions to the batch.' - ); - } - } - - // estimate the transaction for the UserOps batch - let op; - try { - op = await mumbaiTestNetSdk.estimate(); - console.log(`Estimated the ${txCount} transactions successfully.`); - addContext(test, `Estimated the ${txCount} transactions successfully.`); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'An error is displayed while performing the estimation of the transactions.' - ); - } - - // submit the transaction - let uoHash; - try { - uoHash = await mumbaiTestNetSdk.send(op); - console.log(`UserOp Hash: ${uoHash}`); - addContext(test, `UserOp Hash: ${uoHash}`); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('An error is displayed while submit the transactions.'); - } - }, data.retry); // Retry this async test up to 5 times - }); -}); diff --git a/test/specs/skandhaBenchmark/erc20Transaction/sepolia.spec.js b/test/specs/skandhaBenchmark/erc20Transaction/sepolia.spec.js deleted file mode 100644 index 99a2c08..0000000 --- a/test/specs/skandhaBenchmark/erc20Transaction/sepolia.spec.js +++ /dev/null @@ -1,150 +0,0 @@ -import * as dotenv from 'dotenv'; -dotenv.config(); // init dotenv -import { PrimeSdk } from '@etherspot/prime-sdk'; -import { ethers, utils } from 'ethers'; -import { assert } from 'chai'; -import { ERC20_ABI } from '@etherspot/prime-sdk/dist/sdk/helpers/abi/ERC20_ABI.js'; -import customRetryAsync from '../../../utils/baseTest.js'; -import addContext from 'mochawesome/addContext.js'; -import data from '../../../data/testData.json' assert { type: 'json' }; - -const txCount = data.txCount; - -describe('Determine benchmarks of skandha with erc20 token transaction on the sepolia network', function () { - it('Perform the transfer ERC20 token on the sepolia network for determine benchmarks of skandha', async function () { - var test = this; - await customRetryAsync(async function () { - let sepoliaTestNetSdk; - try { - sepoliaTestNetSdk = new PrimeSdk( - { privateKey: process.env.PRIVATE_KEY }, - { - chainId: Number(data.sepolia_chainid), - projectKey: process.env.PROJECT_KEY_TESTNET, - } - ); - } catch (e) { - console.error(e); - return; // Handle initialization error and skip test - } - - // clear the transaction batch - try { - await sepoliaTestNetSdk.clearUserOpsFromBatch(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The transaction of the batch is not clear correctly.'); - } - - let userOpsBatch = []; - for (let j = 0; j < txCount; j++) { - let transactionData; - // get the respective provider details - let provider; - try { - provider = new 
ethers.providers.JsonRpcProvider( - data.providerNetwork_sepolia - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The provider response is not displayed correctly.'); - } - - // get erc20 Contract Interface - let erc20Instance; - try { - erc20Instance = new ethers.Contract( - data.tokenAddress_sepoliaUSDC, - ERC20_ABI, - provider - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('The get erc20 Contract Interface is not performed.'); - } - - // get decimals from erc20 contract - let decimals; - try { - decimals = await erc20Instance.functions.decimals(); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The decimals from erc20 contract is not displayed correctly.' - ); - } - - // get transferFrom encoded data - try { - transactionData = erc20Instance.interface.encodeFunctionData( - 'transfer', - [ - data.recipient, - ethers.utils.parseUnits(data.erc20_value, decimals), - ] - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'The decimals from erc20 contract is not displayed correctly.' - ); - } - - // Add transaction data to the batch - try { - userOpsBatch.push( - await sepoliaTestNetSdk.addUserOpsToBatch({ - to: data.tokenAddress_sepoliaUSDC, - data: transactionData, - }) - ); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'An error is displayed while Adding the transactions to the batch.' - ); - } - } - - // estimate the transaction for the UserOps batch - let op; - try { - op = await sepoliaTestNetSdk.estimate(); - console.log(`Estimated the ${txCount} transactions successfully.`); - addContext(test, `Estimated the ${txCount} transactions successfully.`); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail( - 'An error is displayed while performing the estimation of the transactions.' 
- ); - } - - // submit the transaction - let uoHash; - try { - uoHash = await sepoliaTestNetSdk.send(op); - console.log(`UserOp Hash: ${uoHash}`); - addContext(test, `UserOp Hash: ${uoHash}`); - } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail('An error is displayed while submit the transactions.'); - } - }, data.retry); // Retry this async test up to 5 times - }); -}); diff --git a/test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js new file mode 100644 index 0000000..cb63090 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js @@ -0,0 +1,790 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the check whitelist endpoint of the Arka', function () { + it( + 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // check the whitelist status + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_1); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // check the whitelist status + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { 
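+        // NOTE (clarification, not part of the original spec): the Arka v1 whitelist and
+        // check-whitelist endpoints exercised here are plain POSTs whose body carries a
+        // positional params array -- [addresses, chainId, apiKey] -- and whose response
+        // exposes a human-readable `message` matched against constant.json. A minimal
+        // standalone call, assuming the same testData.json keys, would look like:
+        //
+        //   const res = await axios.post(
+        //     data.arka_checkwhitelist,
+        //     { params: [[randomAddress.address], randomChainId, process.env.API_KEY] },
+        //     { headers: header }
+        //   );
+        //   console.log(res.data.message);
+        //
+        // The earlier Helper.wait(15000) gives the whitelist entry time to propagate; if
+        // Helper.wait returns a Promise it would need to be awaited to actually pause.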
+ console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_2); + console.log(message.fail_checkWhitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_2); + console.log(message.vali_checkWhitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_3); + console.log(message.fail_checkWhitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_3); + console.log(message.vali_checkWhitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_4); + console.log(message.fail_checkWhitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_4); + console.log(message.vali_checkWhitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_44); + console.log(message.fail_checkWhitelistv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_44); + console.log(message.vali_checkWhitelistv1_44); + } else { + 
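+          // any other error is unexpected, so log it and fail the test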
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_5); + console.log(message.fail_checkWhitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_5); + console.log(message.vali_checkWhitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_6); + console.log(message.fail_checkWhitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_6); + console.log(message.vali_checkWhitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_7); + console.log(message.fail_checkWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_7); + console.log(message.vali_checkWhitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_checkWhitelistv1_8); + console.log(message.fail_checkWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_8); + console.log(message.vali_checkWhitelistv1_8); + } else { + 
console.error(e);
+              const eString = e.toString();
+              addContext(test, eString);
+              assert.fail(message.fail_checkWhitelistv1_8);
+            }
+          }
+        }
+      );
+
+  it(
+    'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v2 of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+      const randomAddress = ethers.Wallet.createRandom();
+      const addresses = [randomAddress.address];
+
+      // make the random address whitelisted
+      try {
+        const response = await axios.post(
+          data.arka_whitelist_v2,
+          {
+            params: [addresses, randomChainId, process.env.API_KEY_ARKA],
+          },
+          header
+        );
+
+        // perform assertions
+        assert.include(response.data.message, constant.add_whitelist_1);
+
+        // perform assertions
+        assert.equal(response.status, constant.successCode_1);
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_whitelistv2_1);
+      }
+
+      // wait for a few seconds
+      Helper.wait(15000);
+
+      // check the whitelist status
+      try {
+        const response = await axios.post(
+          data.arka_checkwhitelist_v2,
+          {
+            params: [addresses, randomChainId, process.env.API_KEY_ARKA],
+          },
+          header
+        );
+
+        // perform assertions
+        assert.include(response.data.message, constant.check_whitelist_1);
+
+        // perform assertions
+        assert.equal(response.status, constant.successCode_1);
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_checkWhitelistv2_1);
+      }
+    }
+  );
+
+  it(
+    'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v2 of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+      const randomAddress = ethers.Wallet.createRandom();
+      const addresses = [randomAddress.address];
+
+      // check the whitelist status
+      try {
+        const response = await axios.post(
+          data.arka_checkwhitelist_v2,
+          {
+            params: [addresses, randomChainId, process.env.API_KEY_ARKA],
+          },
+          header
+        );
+
+        // perform assertions
+        assert.include(response.data.message, constant.check_whitelist_2);
+
+        // perform assertions
+        assert.equal(response.status, constant.successCode_1);
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_checkWhitelistv2_1);
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid url of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+      const randomAddress = ethers.Wallet.createRandom();
+      const addresses = [randomAddress.address];
+
+      try {
+        const response = await axios.post(
+          data.arka_checkwhitelist_v2_invalid, // invalid url
+          {
+            params: [addresses, randomChainId, process.env.API_KEY],
+          },
+          header
+        );
+
+        addContext(test, message.fail_checkWhitelistv2_2);
+        console.log(message.fail_checkWhitelistv2_2);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.not_found)) {
+          addContext(test, message.vali_checkWhitelistv2_2);
+          console.log(message.vali_checkWhitelistv2_2);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_checkWhitelistv2_2);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect url of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+      const randomAddress = ethers.Wallet.createRandom();
+      const addresses = [randomAddress.address];
+
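+      // request the deliberately incorrect URL; the catch below accepts a not_found error as the expected outcome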
+ try { + const response = await axios.post( + data.arka_checkwhitelist_v2_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_3); + console.log(message.fail_checkWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv2_3); + console.log(message.vali_checkWhitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_4); + console.log(message.fail_checkWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv2_4); + console.log(message.vali_checkWhitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_4); + console.log(message.fail_checkWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv2_4); + console.log(message.vali_checkWhitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_5); + console.log(message.fail_checkWhitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_5); + console.log(message.vali_checkWhitelistv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: 
[addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_6); + console.log(message.fail_checkWhitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_6); + console.log(message.vali_checkWhitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_7); + console.log(message.fail_checkWhitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_7); + console.log(message.vali_checkWhitelistv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_checkwhitelist_v2, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_checkWhitelistv2_8); + console.log(message.fail_checkWhitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv2_8); + console.log(message.vali_checkWhitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/deposit.spec.js b/test/specs/testnet/paymasterAPIs/arka/deposit.spec.js new file mode 100644 index 0000000..9c91679 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/deposit.spec.js @@ -0,0 +1,648 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the deposit endpoint of the Arka', function () { + it( + 'SMOKE: Validate the deposit endpoint with v1 of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // make the random address whitelisted + try { + const response = 
await axios.post( + data.arka_deposit, + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.deposit_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_invalid, // invalid url + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_depositv1_2); + console.log(message.fail_depositv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv1_2); + console.log(message.vali_depositv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_incorrect, // incorrect url + { + params: [data.value, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_depositv1_3); + console.log(message.fail_depositv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv1_3); + console.log(message.vali_depositv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_3); + } + } + } + ); + + it.only( + 'REGRESSION: Validate the deposit endpoint with v1 and invalid value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [ + data.invalidValue, // invalid value + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv1_4); + console.log(message.fail_depositv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_depositv1_4); + console.log(message.vali_depositv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_4); + } + } + } + ); + + it.only( + 'REGRESSION: Validate the deposit endpoint with v1 and exceeded value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [ + data.exceededValue, // exceeded value + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv1_44); + console.log(message.fail_depositv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_depositv1_44); + console.log(message.vali_depositv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_44); + 
} + } + } + ); + + it.only( + 'REGRESSION: Validate the deposit endpoint with v1 and without value of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [ + randomChainId, // without value + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv1_5); + console.log(message.fail_depositv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_5); + console.log(message.vali_depositv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [data.value, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_depositv1_6); + console.log(message.fail_depositv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_6); + console.log(message.vali_depositv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [data.value, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_depositv1_7); + console.log(message.fail_depositv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_7); + console.log(message.vali_depositv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit, + { + params: [data.value, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_depositv1_8); + console.log(message.fail_depositv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv1_8); + console.log(message.vali_depositv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv1_8); + } + } + } + ); + + it( + 'SMOKE: Validate the deposit endpoint with v2 of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.deposit_2); + + // perform assertions + assert.equal(response.status, 
constant.successCode_1);
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_depositv2_1);
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the deposit endpoint with v2 and invalid url of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+
+      try {
+        const response = await axios.post(
+          data.arka_deposit_v2_invalid, // invalid url
+          {
+            params: [data.value, randomChainId, process.env.API_KEY],
+          },
+          header
+        );
+
+        addContext(test, message.fail_depositv2_2);
+        console.log(message.fail_depositv2_2);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.not_found)) {
+          addContext(test, message.vali_depositv2_2);
+          console.log(message.vali_depositv2_2);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_depositv2_2);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the deposit endpoint with v2 and incorrect url of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+
+      try {
+        const response = await axios.post(
+          data.arka_deposit_v2_incorrect, // incorrect url
+          {
+            params: [data.value, randomChainId, process.env.API_KEY],
+          },
+          header
+        );
+
+        addContext(test, message.fail_depositv2_3);
+        console.log(message.fail_depositv2_3);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.not_found)) {
+          addContext(test, message.vali_depositv2_3);
+          console.log(message.vali_depositv2_3);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_depositv2_3);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the deposit endpoint with v2 and invalid value of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+
+      try {
+        const response = await axios.post(
+          data.arka_deposit_v2,
+          {
+            params: [
+              data.invalidValue, // invalid value
+              randomChainId,
+              process.env.API_KEY,
+            ],
+          },
+          header
+        );
+
+        addContext(test, message.fail_depositv2_4);
+        console.log(message.fail_depositv2_4);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.invalid_data)) {
+          addContext(test, message.vali_depositv2_4);
+          console.log(message.vali_depositv2_4);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_depositv2_4);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the deposit endpoint with v2 and exceeded value of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+
+      try {
+        const response = await axios.post(
+          data.arka_deposit_v2,
+          {
+            params: [
+              data.exceededValue, // exceeded value
+              randomChainId,
+              process.env.API_KEY,
+            ],
+          },
+          header
+        );
+
+        addContext(test, message.fail_depositv2_4);
+        console.log(message.fail_depositv2_4);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.deposit_3)) {
+          addContext(test, message.vali_depositv2_4);
+          console.log(message.vali_depositv2_4);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_depositv2_4);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the deposit endpoint with v2 and without value of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+
+      try {
+        const response = await axios.post(
+          data.arka_deposit_v2,
+ { + params: [ + randomChainId, // without value + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_depositv2_5); + console.log(message.fail_depositv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_5); + console.log(message.vali_depositv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_depositv2_6); + console.log(message.fail_depositv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_6); + console.log(message.vali_depositv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_depositv2_7); + console.log(message.fail_depositv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_7); + console.log(message.vali_depositv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_deposit_v2, + { + params: [data.value, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_depositv2_8); + console.log(message.fail_depositv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_8); + console.log(message.vali_depositv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_8); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js new file mode 100644 index 0000000..d9f0dfb --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js @@ -0,0 +1,296 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message 
from '../../../../data/messages.json' assert { type: 'json' };
+import constant from '../../../../data/constant.json' assert { type: 'json' };
+
+// define header with valid details
+const header = {
+  Accept: 'application/json',
+  'Content-Type': 'application/json',
+};
+
+describe('Validate the get all whitelist endpoint of the Arka', function () {
+  it(
+    'SMOKE: Validate the get all whitelist endpoint with whitelisted address and v2 of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+      const randomAddress = ethers.Wallet.createRandom();
+      const addresses = [randomAddress.address];
+
+      // make the random address whitelisted
+      try {
+        const response = await axios.post(
+          data.arka_whitelist_v2,
+          {
+            params: [addresses, randomChainId, process.env.API_KEY],
+          },
+          header
+        );
+
+        // perform assertions
+        assert.include(response.data.message, constant.add_whitelist_1);
+
+        // perform assertions
+        assert.equal(response.status, constant.successCode_1);
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_whitelistv2_1);
+      }
+
+      // wait for a few seconds
+      Helper.wait(15000);
+
+      // validate the get all whitelist endpoint
+      try {
+        const response = await axios.post(
+          data.arka_getAllWhitelist_v2,
+          {
+            params: ['1', randomChainId, process.env.API_KEY],
+          },
+          header
+        );
+
+        // perform assertions
+        assert.isNotEmpty(
+          response.data.addresses,
+          message.vali_getAllWhitelist_addresses
+        );
+
+        // perform assertions
+        assert.equal(response.status, constant.successCode_1);
+      } catch (e) {
+        console.error(e);
+        const eString = e.toString();
+        addContext(test, eString);
+        assert.fail(message.fail_getAllWhitelistv2_2);
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the get all whitelist endpoint with new random address and v2 of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+      try {
+        const response = await axios.post(
+          data.arka_getAllWhitelist_v2,
+          {
+            params: ['1', randomChainId, process.env.API_KEY_ARKA],
+          },
+          header
+        );
+
+        addContext(test, message.fail_getAllWhitelistv2_9);
+        console.log(message.fail_getAllWhitelistv2_9);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.getAllWhitelist_1)) {
+          addContext(test, message.vali_getAllWhitelistv2_9);
+          console.log(message.vali_getAllWhitelistv2_9);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_getAllWhitelistv2_9);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the get all whitelist endpoint with v2 and invalid url of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      var test = this;
+      const randomAddress = ethers.Wallet.createRandom();
+      const addresses = [randomAddress.address];
+
+      try {
+        const response = await axios.post(
+          data.arka_getAllWhitelist_invalid, // invalid url
+          {
+            params: ['1', randomChainId, process.env.API_KEY],
+          },
+          header
+        );
+
+        addContext(test, message.fail_getAllWhitelistv2_2);
+        console.log(message.fail_getAllWhitelistv2_2);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.not_found)) {
+          addContext(test, message.vali_getAllWhitelistv2_2);
+          console.log(message.vali_getAllWhitelistv2_2);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_getAllWhitelistv2_2);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the get all whitelist endpoint
with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist_incorrect, // incorrect url + { + params: ['1', randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_3); + console.log(message.fail_getAllWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_getAllWhitelistv2_3); + console.log(message.vali_getAllWhitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist, + { + params: ['1', randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_6); + console.log(message.fail_getAllWhitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_getAllWhitelistv2_6); + console.log(message.vali_getAllWhitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist, + { + params: ['1', randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_7); + console.log(message.fail_getAllWhitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_getAllWhitelistv2_7); + console.log(message.vali_getAllWhitelistv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getAllWhitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the get all whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_getAllWhitelist, + { + params: ['1', process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_getAllWhitelistv2_8); + console.log(message.fail_getAllWhitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_getAllWhitelistv2_8); + console.log(message.vali_getAllWhitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_getAllWhitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/metadata.spec.js b/test/specs/testnet/paymasterAPIs/arka/metadata.spec.js new file mode 100644 index 0000000..3d4c93d --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/metadata.spec.js @@ -0,0 +1,264 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the metadata endpoint of the Arka', function () { + it( + 'SMOKE: Validate the metadata endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + // validate the metadata endpoint + try { + const response = await axios.get( + data.arka_metadata, + { + params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + }, + header + ); + + // validate the sponsorAddress parameter in the response + assert.isNotEmpty( + response.data.sponsorAddress, + message.vali_metadata_sponsorAddress + ); + + // validate the sponsorWalletBalance parameter in the response + assert.isNotEmpty( + response.data.sponsorWalletBalance, + message.vali_metadata_sponsorWalletBalance + ); + + // validate the sponsorBalance parameter in the response + assert.isNotEmpty( + response.data.sponsorBalance, + message.vali_metadata_sponsorBalance + ); + + // validate the chainsSupported parameter in the response + assert.isNotEmpty( + response.data.chainsSupported, + message.vali_metadata_chainsSupported + ); + + // validate the tokenPaymasters parameter in the response + assert.isNotEmpty( + response.data.tokenPaymasters, + message.vali_metadata_tokenPaymasters + ); + + // validate the multiTokenPaymasters parameter in the response + assert.isNotEmpty( + response.data.multiTokenPaymasters, + message.vali_metadata_multiTokenPaymasters + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_1); + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata_invalid, // invalid url + { + params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + }, + header + ); + + addContext(test, message.fail_metadata_2); + console.log(message.fail_metadata_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_metadata_2); + console.log(message.vali_metadata_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint incorrect 
url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata_incorrect, // incorrect url + { + params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + }, + header + ); + + addContext(test, message.fail_metadata_3); + console.log(message.fail_metadata_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_metadata_3); + console.log(message.vali_metadata_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata, + { + params: { chainId: randomChainId, apiKey: 'arka_public' }, // invalid apikey + }, + header + ); + + addContext(test, message.fail_metadata_6); + console.log(message.fail_metadata_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_metadata_6); + console.log(message.vali_metadata_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata, + { + params: { chainId: randomChainId }, // without apikey + }, + header + ); + + addContext(test, message.fail_metadata_7); + console.log(message.fail_metadata_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_metadata_7); + console.log(message.vali_metadata_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Metadata endpoint without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_metadata, + { + params: { apiKey: process.env.API_KEY }, // without chainid + }, + header + ); + + addContext(test, message.fail_metadata_8); + console.log(message.fail_metadata_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_metadata_8); + console.log(message.vali_metadata_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_metadata_8); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js b/test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js new file mode 100644 index 0000000..4c9f039 --- /dev/null +++ 
b/test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js @@ -0,0 +1,479 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the pimlico address endpoint of the Arka', function () { + it( + 'SMOKE: Validate the pimlico address endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // check the whitelist status + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_1); + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico_invalid, // invalid url + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_2); + console.log(message.fail_pimlicoAddress_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_pimlicoAddress_2); + console.log(message.vali_pimlicoAddress_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico_incorrect, // incorrect url + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + 
process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_3); + console.log(message.fail_pimlicoAddress_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_pimlicoAddress_3); + console.log(message.vali_pimlicoAddress_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + data.invalid_sponsorAddress, // invalid address + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_4); + console.log(message.fail_pimlicoAddress_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_pimlicoAddress_4); + console.log(message.vali_pimlicoAddress_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + { token: data.usdc_token }, + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_44); + console.log(message.fail_pimlicoAddress_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_pimlicoAddress_44); + console.log(message.vali_pimlicoAddress_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + { token: data.usdc_token }, + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_5); + console.log(message.fail_pimlicoAddress_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_5); + console.log(message.vali_pimlicoAddress_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.usdc_token }, + randomChainId, + 'arka_public', + ], // invalid apikey + }, + header + ); + + addContext(test, 
message.fail_pimlicoAddress_6); + console.log(message.fail_pimlicoAddress_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_6); + console.log(message.vali_pimlicoAddress_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [addresses, { token: data.usdc_token }, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_7); + console.log(message.fail_pimlicoAddress_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_7); + console.log(message.vali_pimlicoAddress_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.usdc_token }, + process.env.API_KEY, + ], // without chainid + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_8); + console.log(message.fail_pimlicoAddress_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_8); + console.log(message.vali_pimlicoAddress_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint with invalid token of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + { token: data.invalid_usdc_token }, // invalid token + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_9); + console.log(message.fail_pimlicoAddress_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_9); + console.log(message.vali_pimlicoAddress_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the pimlico address endpoint without token of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_pimlico, + { + params: [ + addresses, + 
// without token + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_pimlicoAddress_10); + console.log(message.fail_pimlicoAddress_10); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_pimlicoAddress_10); + console.log(message.vali_pimlicoAddress_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_pimlicoAddress_10); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js new file mode 100644 index 0000000..b536b08 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js @@ -0,0 +1,798 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import Helper from '../../../../utils/helper.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the remove whitelist endpoint of the Arka', function () { + it( + 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // validate the remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.remove_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // validate remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, 
message.fail_removeWhitelistv1_0); + console.log(message.fail_removeWhitelistv1_0); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_2); + console.log(message.fail_removeWhitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv1_2); + console.log(message.vali_removeWhitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_3); + console.log(message.fail_removeWhitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv1_3); + console.log(message.vali_removeWhitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_4); + console.log(message.fail_removeWhitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_removeWhitelistv1_4); + console.log(message.vali_removeWhitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); 
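+
+          // note: this negative test expects Arka to reject the incorrect sponsor address with an "invalid data" error;
+          // reaching the next line means the request was unexpectedly accepted, which is recorded as a failure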
+ + addContext(test, message.fail_removeWhitelistv1_44); + console.log(message.fail_removeWhitelistv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_removeWhitelistv1_44); + console.log(message.vali_removeWhitelistv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_5); + console.log(message.fail_removeWhitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_5); + console.log(message.vali_removeWhitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_6); + console.log(message.fail_removeWhitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_6); + console.log(message.vali_removeWhitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv1_7); + console.log(message.fail_removeWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_7); + console.log(message.vali_removeWhitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist, + { + params: [addresses, process.env.API_KEY], // without chainid + }, 
+ header + ); + + addContext(test, message.fail_removeWhitelistv1_8); + console.log(message.fail_removeWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_8); + console.log(message.vali_removeWhitelistv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_8); + } + } + } + ); + + it( + 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // make the random address whitelisted + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_1); + } + + // wait for the few seconds + Helper.wait(15000); + + // validate the remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY_ARKA], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.remove_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // validate remove whitelist endpoint + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_2); + console.log(message.fail_removeWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.remove_whitelist_1)) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_1); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_2); + console.log(message.fail_removeWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, 
message.vali_removeWhitelistv2_2); + console.log(message.vali_removeWhitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_3); + console.log(message.fail_removeWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv2_3); + console.log(message.vali_removeWhitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_4); + console.log(message.fail_removeWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_removeWhitelistv2_4); + console.log(message.vali_removeWhitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_44); + console.log(message.fail_removeWhitelistv2_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_removeWhitelistv2_44); + console.log(message.vali_removeWhitelistv2_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_5); + console.log(message.fail_removeWhitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_5); + 
console.log(message.vali_removeWhitelistv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_6); + console.log(message.fail_removeWhitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_6); + console.log(message.vali_removeWhitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_7); + console.log(message.fail_removeWhitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_7); + console.log(message.vali_removeWhitelistv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_removeWhitelist_v2, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_removeWhitelistv2_8); + console.log(message.fail_removeWhitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv2_8); + console.log(message.vali_removeWhitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js new file mode 100644 index 0000000..472fc44 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js @@ -0,0 +1,671 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from 
'../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the whitelist endpoint of the Arka', function () { + it( + 'SMOKE: Validate the Whitelist endpoint with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv1_2); + console.log(message.fail_whitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv1_2); + console.log(message.vali_whitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv1_3); + console.log(message.fail_whitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv1_3); + console.log(message.vali_whitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv1_4); + console.log(message.fail_whitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv1_4); + 
console.log(message.vali_whitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv1_44); + console.log(message.fail_whitelistv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv1_44); + console.log(message.vali_whitelistv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv1_5); + console.log(message.fail_whitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_5); + console.log(message.vali_whitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_whitelistv1_6); + console.log(message.fail_whitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_6); + console.log(message.vali_whitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + addContext(test, message.fail_whitelistv1_7); + console.log(message.fail_whitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_7); + console.log(message.vali_whitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_7); + } + } + } + ); + + it( + 
'REGRESSION: Validate the Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_whitelistv1_8); + console.log(message.fail_whitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_8); + console.log(message.vali_whitelistv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_8); + } + } + } + ); + + it( + 'SMOKE: Validate the Whitelist endpoint with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2_invalid, // invalid url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv2_2); + console.log(message.fail_whitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv2_2); + console.log(message.vali_whitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2_incorrect, // incorrect url + { + params: [addresses, randomChainId, process.env.API_KEY], + }, + header + ); + + addContext(test, message.fail_whitelistv2_3); + console.log(message.fail_whitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv2_3); + console.log(message.vali_whitelistv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const 
response = await axios.post( + data.arka_whitelist_v2, + { + params: [ + data.invalid_sponsorAddress, // invalid address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv2_4); + console.log(message.fail_whitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv2_4); + console.log(message.vali_whitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [ + data.incorrect_sponsorAddress, // incorrect address + randomChainId, + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv2_4); + console.log(message.fail_whitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_whitelistv2_4); + console.log(message.vali_whitelistv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [ + randomChainId, // without address + process.env.API_KEY, + ], + }, + header + ); + + addContext(test, message.fail_whitelistv2_5); + console.log(message.fail_whitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_5); + console.log(message.vali_whitelistv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, randomChainId, 'arka_public'], // invalid apikey + }, + header + ); + + addContext(test, message.fail_whitelistv2_6); + console.log(message.fail_whitelistv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_6); + console.log(message.vali_whitelistv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, randomChainId], // without apikey + }, + header + ); + + 
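+
+        // without an api key the whitelist v2 call is expected to be rejected; if execution continues here, the failure message below is logged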
addContext(test, message.fail_whitelistv2_7); + console.log(message.fail_whitelistv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_7); + console.log(message.vali_whitelistv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + try { + const response = await axios.post( + data.arka_whitelist_v2, + { + params: [addresses, process.env.API_KEY], // without chainid + }, + header + ); + + addContext(test, message.fail_whitelistv2_8); + console.log(message.fail_whitelistv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_8); + console.log(message.vali_whitelistv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_8); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js b/test/specs/testnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js new file mode 100644 index 0000000..11c37a5 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/skandha/eth_estimateUserOperationGas.spec.js @@ -0,0 +1,2261 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import helper from '../../../../utils/helper.js'; + +let testnetPrimeSdk; +let sender; +let nonce; +let initCode; +let callData; +let callGasLimit; +let verificationGasLimit; +let maxFeePerGas; +let maxPriorityFeePerGas; +let paymasterAndData; +let preVerificationGas; +let signature; + +//define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the estimate user operation gas endpoint of the skandha', function () { + it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + //wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = 
e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + + sender = op.sender; + nonce = op.nonce; + initCode = op.initCode; + callData = op.callData; + callGasLimit = op.callGasLimit; + verificationGasLimit = op.verificationGasLimit; + maxFeePerGas = op.maxFeePerGas; + maxPriorityFeePerGas = op.maxPriorityFeePerGas; + paymasterAndData = op.paymasterAndData; + preVerificationGas = op.preVerificationGas; + signature = op.signature; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + } + ); + + it( + 'SMOKE: Validate the eth_estimateUserOperationGas endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_estimateUserOperationGas_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.preVerificationGas, + message.vali_skandha_estimateUserOperationGas_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.verificationGasLimit, + message.vali_skandha_estimateUserOperationGas_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.callGasLimit, + message.vali_skandha_estimateUserOperationGas_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.verificationGas, + message.vali_skandha_estimateUserOperationGas_verificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxFeePerGas, + message.vali_skandha_estimateUserOperationGas_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxPriorityFeePerGas, + message.vali_skandha_estimateUserOperationGas_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.validUntil, + message.vali_skandha_estimateUserOperationGas_validUntil + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: data.invalidSender, // invalid sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_2); + console.log(message.fail_skandha_estimateUserOperationGas_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_2); + console.log(message.vali_skandha_estimateUserOperationGas_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: data.incorrectSender, // incorrect sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_3); + console.log(message.fail_skandha_estimateUserOperationGas_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.skandha_error_2)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_3); + console.log(message.vali_skandha_estimateUserOperationGas_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + nonce: nonce, // without sender + 
initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_3); + console.log(message.fail_skandha_estimateUserOperationGas_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_3); + console.log(message.vali_skandha_estimateUserOperationGas_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: data.invalid_hex, // invalid nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_5); + console.log(message.fail_skandha_estimateUserOperationGas_5); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_5); + console.log(message.vali_skandha_estimateUserOperationGas_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: data.incorrect_hex, // incorrect nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_6); + console.log(message.fail_skandha_estimateUserOperationGas_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_6); + console.log(message.vali_skandha_estimateUserOperationGas_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_skandha_estimateUserOperationGas_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, // without nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_7); + console.log(message.fail_skandha_estimateUserOperationGas_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_7); + console.log(message.vali_skandha_estimateUserOperationGas_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.invalid_hex, // invalid initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_8); + console.log(message.fail_skandha_estimateUserOperationGas_8); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_8); + console.log(message.vali_skandha_estimateUserOperationGas_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.incorrect_hex, // incorrect initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + 
header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_9); + console.log(message.fail_skandha_estimateUserOperationGas_9); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_9); + console.log(message.vali_skandha_estimateUserOperationGas_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, // without initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_10); + console.log(message.fail_skandha_estimateUserOperationGas_10); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_10); + console.log(message.vali_skandha_estimateUserOperationGas_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.invalid_hex, // invalid callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_11); + console.log(message.fail_skandha_estimateUserOperationGas_11); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.skandha_error_2)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_11); + console.log(message.vali_skandha_estimateUserOperationGas_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + 
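+            // JSON-RPC 2.0 payload posted directly to the chain's Skandha bundler endpoint for eth_estimateUserOperationGas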
jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.incorrect_hex, // incorrect callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_12); + console.log(message.fail_skandha_estimateUserOperationGas_12); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_12); + console.log(message.vali_skandha_estimateUserOperationGas_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, // without callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_13); + console.log(message.fail_skandha_estimateUserOperationGas_13); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_13); + console.log(message.vali_skandha_estimateUserOperationGas_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_13); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.invalid_hex, // invalid callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_14); + console.log(message.fail_skandha_estimateUserOperationGas_14); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_14); + 
console.log(message.vali_skandha_estimateUserOperationGas_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_14); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.incorrect_hex, // incorrect callGasLiit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_15); + console.log(message.fail_skandha_estimateUserOperationGas_15); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_15); + console.log(message.vali_skandha_estimateUserOperationGas_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_15); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, // without callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_16); + console.log(message.fail_skandha_estimateUserOperationGas_16); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_16); + console.log(message.vali_skandha_estimateUserOperationGas_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_16); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.invalid_hex, // invalid verificationGasLimit + preVerificationGas: preVerificationGas, + 
maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_17); + console.log(message.fail_skandha_estimateUserOperationGas_17); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_17); + console.log(message.vali_skandha_estimateUserOperationGas_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_17); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.incorrect_hex, // incorrect verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_18); + console.log(message.fail_skandha_estimateUserOperationGas_18); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_18); + console.log(message.vali_skandha_estimateUserOperationGas_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_18); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, // without verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_19); + console.log(message.fail_skandha_estimateUserOperationGas_19); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_19); + console.log(message.vali_skandha_estimateUserOperationGas_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_19); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with 
invalid preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.invalid_hex, // invalid preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_20); + console.log(message.fail_skandha_estimateUserOperationGas_20); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_20); + console.log(message.vali_skandha_estimateUserOperationGas_20); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_20); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.incorrect_hex, // incorrect preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_21); + console.log(message.fail_skandha_estimateUserOperationGas_21); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_21); + console.log(message.vali_skandha_estimateUserOperationGas_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_21); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, // without preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_22); + 
console.log(message.fail_skandha_estimateUserOperationGas_22); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_22); + console.log(message.vali_skandha_estimateUserOperationGas_22); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_22); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.invalid_hex, // invalid maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_23); + console.log(message.fail_skandha_estimateUserOperationGas_23); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_23); + console.log(message.vali_skandha_estimateUserOperationGas_23); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_23); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.incorrect_hex, // incorrect maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_24); + console.log(message.fail_skandha_estimateUserOperationGas_24); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_24); + console.log(message.vali_skandha_estimateUserOperationGas_24); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_24); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: 
'2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, // without maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_25); + console.log(message.fail_skandha_estimateUserOperationGas_25); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_25); + console.log(message.vali_skandha_estimateUserOperationGas_25); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_25); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.invalid_hex, // invalid maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_26); + console.log(message.fail_skandha_estimateUserOperationGas_26); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_26); + console.log(message.vali_skandha_estimateUserOperationGas_26); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_26); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.incorrect_hex, // incorrect maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_27); + console.log(message.fail_skandha_estimateUserOperationGas_27); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_27); + 
console.log(message.vali_skandha_estimateUserOperationGas_27); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_27); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, // without maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_28); + console.log(message.fail_skandha_estimateUserOperationGas_28); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_28); + console.log(message.vali_skandha_estimateUserOperationGas_28); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_28); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.invalid_hex, // invalid paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_29); + console.log(message.fail_skandha_estimateUserOperationGas_29); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_29); + console.log(message.vali_skandha_estimateUserOperationGas_29); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_29); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: 
maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.incorrect_hex, // incorrect paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_30); + console.log(message.fail_skandha_estimateUserOperationGas_30); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_30); + console.log(message.vali_skandha_estimateUserOperationGas_30); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_30); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, // without paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_31); + console.log(message.fail_skandha_estimateUserOperationGas_31); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_31); + console.log(message.vali_skandha_estimateUserOperationGas_31); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_31); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.invalid_hex, // invalid signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_32); + console.log(message.fail_skandha_estimateUserOperationGas_32); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.skandha_error_2)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_32); + console.log(message.vali_skandha_estimateUserOperationGas_32); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_32); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with 
incorrect signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.incorrect_hex, // incorrect signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_33); + console.log(message.fail_skandha_estimateUserOperationGas_33); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_33); + console.log(message.vali_skandha_estimateUserOperationGas_33); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_33); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, // without signature + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_34); + console.log(message.fail_skandha_estimateUserOperationGas_34); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_34); + console.log(message.vali_skandha_estimateUserOperationGas_34); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_34); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.invalidEntryPointAddress, // invalid entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_35); + 
console.log(message.fail_skandha_estimateUserOperationGas_35); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_35); + console.log(message.vali_skandha_estimateUserOperationGas_35); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_35); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.incorrectentryPointAddress, // incorrect entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_36); + console.log(message.fail_skandha_estimateUserOperationGas_36); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_36); + console.log(message.vali_skandha_estimateUserOperationGas_36); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_36); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperationGas', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + ], // without entry point address + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_37); + console.log(message.fail_skandha_estimateUserOperationGas_37); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_37); + console.log(message.vali_skandha_estimateUserOperationGas_37); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_37); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 
'Eth_EstimateUserOperationGas', // invalid method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_38); + console.log(message.fail_skandha_estimateUserOperationGas_38); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_38); + console.log(message.vali_skandha_estimateUserOperationGas_38); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_38); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_estimateUserOperati', // incorrect method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_39); + console.log(message.fail_skandha_estimateUserOperationGas_39); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_39); + console.log(message.vali_skandha_estimateUserOperationGas_39); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_39); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_estimateUserOperationGas endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: '', // without method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_estimateUserOperationGas_40); + console.log(message.fail_skandha_estimateUserOperationGas_40); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_estimateUserOperationGas_40); + console.log(message.vali_skandha_estimateUserOperationGas_40); + } else { + 
console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_estimateUserOperationGas_40); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js b/test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js new file mode 100644 index 0000000..abbe827 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationByHash.spec.js @@ -0,0 +1,507 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import helper from '../../../../utils/helper.js'; + +let testnetPrimeSdk; +let uoHash; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the get userOperation by hash endpoint of the skandha', function () { + it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializing sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and send it to the bundler + try { + uoHash = await testnetPrimeSdk.send(op); + + console.log('UserOp hash:', uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + }
else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + } + ); + + it( + 'SMOKE: Validate the eth_getUserOperationByHash endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [uoHash], + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_getUserOperationByHash_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.sender, + message.vali_skandha_getUserOperationByHash_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.nonce, + message.vali_skandha_getUserOperationByHash_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.initCode, + message.vali_skandha_getUserOperationByHash_initCode + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.callData, + message.vali_skandha_getUserOperationByHash_callData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.callGasLimit, + message.vali_skandha_getUserOperationByHash_callGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.verificationGasLimit, + message.vali_skandha_getUserOperationByHash_verificationGasLimit + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.preVerificationGas, + message.vali_skandha_getUserOperationByHash_preVerificationGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.maxFeePerGas, + message.vali_skandha_getUserOperationByHash_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.maxPriorityFeePerGas, + message.vali_skandha_getUserOperationByHash_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.paymasterAndData, + message.vali_skandha_getUserOperationByHash_paymasterAndData + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOperation.signature, + message.vali_skandha_getUserOperationByHash_signature + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.entryPoint, + message.vali_skandha_getUserOperationByHash_entryPoint + ); 
+ } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha with invalid hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [data.invalid_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_2); + console.log(message.fail_skandha_getUserOperationByHash_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_2); + console.log(message.vali_skandha_getUserOperationByHash_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha with incorrect hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [data.incorrect_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_3); + console.log(message.fail_skandha_getUserOperationByHash_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_3); + console.log(message.vali_skandha_getUserOperationByHash_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha withOUT hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationByHash', + params: [], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_4); + console.log(message.fail_skandha_getUserOperationByHash_4); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_4); + console.log(message.vali_skandha_getUserOperationByHash_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'Eth_GetUserOperationByHash', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_6); + 
console.log(message.fail_skandha_getUserOperationByHash_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_6); + console.log(message.vali_skandha_getUserOperationByHash_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperatio', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_6); + console.log(message.fail_skandha_getUserOperationByHash_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_6); + console.log(message.vali_skandha_getUserOperationByHash_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationByHash endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: '', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationByHash_7); + console.log(message.fail_skandha_getUserOperationByHash_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationByHash_7); + console.log(message.vali_skandha_getUserOperationByHash_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationByHash_7); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js b/test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js new file mode 100644 index 0000000..43b8e96 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/skandha/eth_getUserOperationReceipt.spec.js @@ -0,0 +1,600 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import helper from '../../../../utils/helper.js'; + +let testnetPrimeSdk; +let uoHash; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the get userOperation receipt endpoint of the skandha', function () { 
+ it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializing sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + // wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // get balance of the account address + let balance; + try { + balance = await testnetPrimeSdk.getNativeBalance(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getBalance_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + + // sign the UserOp and send it to the bundler + try { + uoHash = await testnetPrimeSdk.send(op); + + console.log('UserOp hash:', uoHash); + } catch (e) { + console.error(e); + const eString = e.toString(); + if (eString === 'Error') { + console.warn(message.skip_transaction_error); + addContext(test, message.skip_transaction_error); + test.skip(); + } else { + addContext(test, eString); + assert.fail(message.fail_submitTransaction_1); + } + } + + // get transaction hash + let userOpsReceipt = null; + try { + console.log('Waiting for transaction...'); + const timeout = Date.now() + 60000; // 1 minute timeout + while (userOpsReceipt == null && Date.now() < timeout) { + await helper.wait(5000); + userOpsReceipt = await testnetPrimeSdk.getUserOpReceipt(uoHash); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_getTransactionHash_1); + } + } + ); + + it( + 'SMOKE: Validate the eth_getUserOperationReceipt endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [uoHash], + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_getUserOperationReceipt_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.userOpHash, + message.vali_skandha_getUserOperationReceipt_userOpHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + }
+ + try { + assert.isNotEmpty( + response.data.result.sender, + message.vali_skandha_getUserOperationReceipt_sender + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.nonce, + message.vali_skandha_getUserOperationReceipt_nonce + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.actualGasCost, + message.vali_skandha_getUserOperationReceipt_actualGasCost + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.actualGasUsed, + message.vali_skandha_getUserOperationReceipt_actualGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.to, + message.vali_skandha_getUserOperationReceipt_to + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.from, + message.vali_skandha_getUserOperationReceipt_from + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.transactionIndex, + message.vali_skandha_getUserOperationReceipt_transactionIndex + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.gasUsed, + message.vali_skandha_getUserOperationReceipt_gasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.logsBloom, + message.vali_skandha_getUserOperationReceipt_logsBloom + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.blockHash, + message.vali_skandha_getUserOperationReceipt_blockHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.transactionHash, + message.vali_skandha_getUserOperationReceipt_transactionHash + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.logs, + message.vali_skandha_getUserOperationReceipt_logs + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.blockNumber, + message.vali_skandha_getUserOperationReceipt_blockNumber + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.confirmations, + message.vali_skandha_getUserOperationReceipt_confirmations + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.cumulativeGasUsed, + message.vali_skandha_getUserOperationReceipt_cumulativeGasUsed + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + 
assert.isNotEmpty( + response.data.result.receipt.effectiveGasPrice, + message.vali_skandha_getUserOperationReceipt_effectiveGasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.status, + message.vali_skandha_getUserOperationReceipt_status + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.receipt.type, + message.vali_skandha_getUserOperationReceipt_type + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with invalid hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [data.invalid_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_2); + console.log(message.fail_skandha_getUserOperationReceipt_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_2); + console.log(message.vali_skandha_getUserOperationReceipt_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with incorrect hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [data.incorrect_hash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_3); + console.log(message.fail_skandha_getUserOperationReceipt_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_3); + console.log(message.vali_skandha_getUserOperationReceipt_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha withOUT hash on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperationReceipt', + params: [], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_4); + console.log(message.fail_skandha_getUserOperationReceipt_4); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_4); + console.log(message.vali_skandha_getUserOperationReceipt_4); + } else { + console.error(e); + const eString 
= e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'Eth_GetUserOperationReceipt', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_6); + console.log(message.fail_skandha_getUserOperationReceipt_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_6); + console.log(message.vali_skandha_getUserOperationReceipt_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: 'eth_getUserOperatio', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_6); + console.log(message.fail_skandha_getUserOperationReceipt_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_6); + console.log(message.vali_skandha_getUserOperationReceipt_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_getUserOperationReceipt endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + id: 3, + method: '', + params: [uoHash], + }, + header + ); + + addContext(test, message.fail_skandha_getUserOperationReceipt_7); + console.log(message.fail_skandha_getUserOperationReceipt_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getUserOperationReceipt_7); + console.log(message.vali_skandha_getUserOperationReceipt_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getUserOperationReceipt_7); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js b/test/specs/testnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js new file mode 100644 index 0000000..9541171 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/skandha/eth_sendUserOperation.spec.js @@ -0,0 +1,2195 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import { PrimeSdk, EtherspotBundler } from '@etherspot/prime-sdk'; +import { ethers } from 'ethers'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' };
+import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import helper from '../../../../utils/helper.js'; + +let testnetPrimeSdk; +let sender; +let nonce; +let initCode; +let callData; +let callGasLimit; +let verificationGasLimit; +let maxFeePerGas; +let maxPriorityFeePerGas; +let paymasterAndData; +let preVerificationGas; +let signature; + +//define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the send user operation endpoint of the skandha', function () { + it( + 'PRECONDITION: Perform the transfer native token with valid details on the ' + + randomChainName + + ' network', + async function () { + var test = this; + + // initializating sdk + try { + testnetPrimeSdk = new PrimeSdk( + { privateKey: process.env.PRIVATE_KEY }, + { + chainId: Number(randomChainId), + bundlerProvider: new EtherspotBundler( + Number(randomChainId), + process.env.BUNDLER_API_KEY + ), + } + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_sdk_initialize); + } + + //wait for the execution + helper.wait(data.mediumTimeout); + + // clear the transaction batch + try { + await testnetPrimeSdk.clearUserOpsFromBatch(); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_clearTransaction_1); + } + + // add transactions to the batch + let transactionBatch; + try { + transactionBatch = await testnetPrimeSdk.addUserOpsToBatch({ + to: data.recipient, + value: ethers.utils.parseEther(data.value), + }); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addTransaction_1); + } + + // estimate transactions added to the batch and get the fee data for the UserOp + let op; + try { + op = await testnetPrimeSdk.estimate(); + + sender = op.sender; + nonce = op.nonce; + initCode = op.initCode; + callData = op.callData; + callGasLimit = op.callGasLimit; + verificationGasLimit = op.verificationGasLimit; + maxFeePerGas = op.maxFeePerGas; + maxPriorityFeePerGas = op.maxPriorityFeePerGas; + paymasterAndData = op.paymasterAndData; + preVerificationGas = op.preVerificationGas; + signature = op.signature; + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_estimateTransaction_1); + } + } + ); + + it( + 'SMOKE: Validate the eth_sendUserOperation endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: data.sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + 
message.vali_skandha_sendUserOperation_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result, + message.vali_skandha_sendUserOperation_result + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_1); + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: data.invalidSender, // invalid sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_2); + console.log(message.fail_skandha_sendUserOperation_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_2); + console.log(message.vali_skandha_sendUserOperation_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: data.incorrectSender, // incorrect sender + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_3); + console.log(message.fail_skandha_sendUserOperation_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_3); + console.log(message.vali_skandha_sendUserOperation_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without sender on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + nonce: nonce, // without sender + initCode: initCode, 
+ callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_3); + console.log(message.fail_skandha_sendUserOperation_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_3); + console.log(message.vali_skandha_sendUserOperation_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: data.invalid_hex, // invalid nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_5); + console.log(message.fail_skandha_sendUserOperation_5); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_5); + console.log(message.vali_skandha_sendUserOperation_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: data.incorrect_hex, // incorrect nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_6); + console.log(message.fail_skandha_sendUserOperation_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_6); + console.log(message.vali_skandha_sendUserOperation_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha 
without nonce on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, // without nonce + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_7); + console.log(message.fail_skandha_sendUserOperation_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_7); + console.log(message.vali_skandha_sendUserOperation_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.invalid_hex, // invalid initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_8); + console.log(message.fail_skandha_sendUserOperation_8); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_8); + console.log(message.vali_skandha_sendUserOperation_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: data.incorrect_hex, // incorrect initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_9); + console.log(message.fail_skandha_sendUserOperation_9); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, 
message.vali_skandha_sendUserOperation_9); + console.log(message.vali_skandha_sendUserOperation_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without initCode on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, // without initCode + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_10); + console.log(message.fail_skandha_sendUserOperation_10); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_10); + console.log(message.vali_skandha_sendUserOperation_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.invalid_hex, // invalid callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_11); + console.log(message.fail_skandha_sendUserOperation_11); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_11); + console.log(message.vali_skandha_sendUserOperation_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: data.incorrect_hex, // incorrect callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: 
paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_12); + console.log(message.fail_skandha_sendUserOperation_12); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_12); + console.log(message.vali_skandha_sendUserOperation_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without callData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, // without callData + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_13); + console.log(message.fail_skandha_sendUserOperation_13); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_13); + console.log(message.vali_skandha_sendUserOperation_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_13); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.invalid_hex, // invalid callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_14); + console.log(message.fail_skandha_sendUserOperation_14); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_14); + console.log(message.vali_skandha_sendUserOperation_14); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_14); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 
'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: data.incorrect_hex, // incorrect callGasLiit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_15); + console.log(message.fail_skandha_sendUserOperation_15); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_15); + console.log(message.vali_skandha_sendUserOperation_15); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_15); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without callGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, // without callGasLimit + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_16); + console.log(message.fail_skandha_sendUserOperation_16); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_16); + console.log(message.vali_skandha_sendUserOperation_16); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_16); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.invalid_hex, // invalid verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_17); + console.log(message.fail_skandha_sendUserOperation_17); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_17); + console.log(message.vali_skandha_sendUserOperation_17); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_skandha_sendUserOperation_17); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: data.incorrect_hex, // incorrect verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_18); + console.log(message.fail_skandha_sendUserOperation_18); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_18); + console.log(message.vali_skandha_sendUserOperation_18); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_18); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without verificationGasLimit on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, // without verificationGasLimit + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_19); + console.log(message.fail_skandha_sendUserOperation_19); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_19); + console.log(message.vali_skandha_sendUserOperation_19); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_19); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.invalid_hex, // invalid preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_20); + 
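// Every REGRESSION case in this spec follows the same pattern: rebuild the UserOperation
// captured by the PRECONDITION test and change exactly one field to data.invalid_hex or
// data.incorrect_hex, or omit it entirely. A table-driven sketch of that pattern --
// baseUserOp and sendWithOverride are assumed names, not helpers that exist in this
// suite -- would look like:
//
//   const baseUserOp = () => ({
//     sender, nonce, initCode, callData, callGasLimit, verificationGasLimit,
//     preVerificationGas, maxPriorityFeePerGas, maxFeePerGas, paymasterAndData, signature,
//   });
//   const sendWithOverride = (override) =>
//     axios.post(
//       `https://${randomChainName}-bundler.etherspot.io/`,
//       {
//         jsonrpc: '2.0',
//         method: 'eth_sendUserOperation',
//         params: [{ ...baseUserOp(), ...override }, data.entryPointAddress],
//         id: 3,
//       },
//       header
//     );
//   // e.g. sendWithOverride({ preVerificationGas: data.invalid_hex });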
console.log(message.fail_skandha_sendUserOperation_20); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_20); + console.log(message.vali_skandha_sendUserOperation_20); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_20); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: data.incorrect_hex, // incorrect preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_21); + console.log(message.fail_skandha_sendUserOperation_21); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_21); + console.log(message.vali_skandha_sendUserOperation_21); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_21); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without preVerificationGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, // without preVerificationGas + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_22); + console.log(message.fail_skandha_sendUserOperation_22); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_22); + console.log(message.vali_skandha_sendUserOperation_22); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_22); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + 
verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.invalid_hex, // invalid maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_23); + console.log(message.fail_skandha_sendUserOperation_23); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_23); + console.log(message.vali_skandha_sendUserOperation_23); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_23); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: data.incorrect_hex, // incorrect maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_24); + console.log(message.fail_skandha_sendUserOperation_24); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_24); + console.log(message.vali_skandha_sendUserOperation_24); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_24); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without maxPriorityFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, // without maxPriorityFeePerGas + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_25); + console.log(message.fail_skandha_sendUserOperation_25); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_25); + console.log(message.vali_skandha_sendUserOperation_25); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_25); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid maxFeePerGas on 
the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.invalid_hex, // invalid maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_26); + console.log(message.fail_skandha_sendUserOperation_26); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_26); + console.log(message.vali_skandha_sendUserOperation_26); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_26); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: data.incorrect_hex, // incorrect maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_27); + console.log(message.fail_skandha_sendUserOperation_27); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_27); + console.log(message.vali_skandha_sendUserOperation_27); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_27); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without maxFeePerGas on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, // without maxFeePerGas + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_28); + console.log(message.fail_skandha_sendUserOperation_28); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, 
message.vali_skandha_sendUserOperation_28); + console.log(message.vali_skandha_sendUserOperation_28); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_28); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.invalid_hex, // invalid paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_29); + console.log(message.fail_skandha_sendUserOperation_29); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_29); + console.log(message.vali_skandha_sendUserOperation_29); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_29); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: data.incorrect_hex, // incorrect paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_30); + console.log(message.fail_skandha_sendUserOperation_30); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_30); + console.log(message.vali_skandha_sendUserOperation_30); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_30); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without paymasterAndData on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, 
// without paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_31); + console.log(message.fail_skandha_sendUserOperation_31); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_31); + console.log(message.vali_skandha_sendUserOperation_31); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_31); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.invalid_hex, // invalid signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_32); + console.log(message.fail_skandha_sendUserOperation_32); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_32); + console.log(message.vali_skandha_sendUserOperation_32); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_32); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: data.incorrect_hex, // incorrect signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_33); + console.log(message.fail_skandha_sendUserOperation_33); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_33); + console.log(message.vali_skandha_sendUserOperation_33); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_33); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without signature on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + 
`https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, // without signature + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_34); + console.log(message.fail_skandha_sendUserOperation_34); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_34); + console.log(message.vali_skandha_sendUserOperation_34); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_34); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.invalidEntryPointAddress, // invalid entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_35); + console.log(message.fail_skandha_sendUserOperation_35); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_35); + console.log(message.vali_skandha_sendUserOperation_35); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_35); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.incorrectentryPointAddress, // incorrect entry point address + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_36); + console.log(message.fail_skandha_sendUserOperation_36); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_36); + console.log(message.vali_skandha_sendUserOperation_36); + } else { 
+ console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_36); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOperation', + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + ], // without entry point address + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_37); + console.log(message.fail_skandha_sendUserOperation_37); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_37); + console.log(message.vali_skandha_sendUserOperation_37); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_37); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'Eth_SendUserOperation', // invalid method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_38); + console.log(message.fail_skandha_sendUserOperation_38); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_38); + console.log(message.vali_skandha_sendUserOperation_38); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_38); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: 'eth_sendUserOper', // incorrect method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + 
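// The entry point and method-name cases vary the parts of the request outside the
// UserOperation itself: params is a two-element array, so the entry point cases replace
// or drop its second element (data.invalidEntryPointAddress, data.incorrectentryPointAddress,
// or nothing), while the method-name cases change only the method string
// ('Eth_SendUserOperation', 'eth_sendUserOper', ''). For reference, a well-formed request
// has this shape (userOp here stands for the estimated UserOperation object):
//
//   { jsonrpc: '2.0', method: 'eth_sendUserOperation',
//     params: [userOp, data.entryPointAddress], id: 3 }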
addContext(test, message.fail_skandha_sendUserOperation_39); + console.log(message.fail_skandha_sendUserOperation_39); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_39); + console.log(message.vali_skandha_sendUserOperation_39); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_39); + } + } + } + ); + + it( + 'REGRESSION: Validate the eth_sendUserOperation endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + jsonrpc: '2.0', + method: '', // without method name + params: [ + { + sender: sender, + nonce: nonce, + initCode: initCode, + callData: callData, + callGasLimit: callGasLimit, + verificationGasLimit: verificationGasLimit, + preVerificationGas: preVerificationGas, + maxPriorityFeePerGas: maxPriorityFeePerGas, + maxFeePerGas: maxFeePerGas, + paymasterAndData: paymasterAndData, + signature: signature, + }, + data.entryPointAddress, + ], + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_sendUserOperation_40); + console.log(message.fail_skandha_sendUserOperation_40); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_sendUserOperation_40); + console.log(message.vali_skandha_sendUserOperation_40); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_sendUserOperation_40); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/skandha/skandha_config.spec.js b/test/specs/testnet/paymasterAPIs/skandha/skandha_config.spec.js new file mode 100644 index 0000000..11bebce --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/skandha/skandha_config.spec.js @@ -0,0 +1,199 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the config endpoint of the skandha', function () { + it( + 'SMOKE: Validate the skandha_config endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_config', + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber(response.data.id, message.vali_skandha_config_id); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.flags, + message.vali_skandha_config_flags + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.entryPoints, + 
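// skandha_config takes no params: the request body is just { method: 'skandha_config',
// id: 3 }, and the fields asserted in this test (flags, entryPoints, beneficiary,
// relayers) are read straight off response.data.result. A minimal call, reusing the same
// header object (configResponse is only an illustrative name), could look like:
//
//   const configResponse = await axios.post(
//     `https://${randomChainName}-bundler.etherspot.io/`,
//     { method: 'skandha_config', id: 3 },
//     header
//   );
//   // configResponse.data.result -> { flags, entryPoints, beneficiary, relayers, ... }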
message.vali_skandha_config_entryPoints + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.beneficiary, + message.vali_skandha_config_beneficiary + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.relayers, + message.vali_skandha_config_relayers + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_1); + } + } + ); + + it( + 'REGRESSION: Validate the skandha_config endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'Skandha_Config', // invalid method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_config_2); + console.log(message.fail_skandha_config_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_config_2); + console.log(message.vali_skandha_config_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_config endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_configuration', // incorrect method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_config_3); + console.log(message.fail_skandha_config_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_config_3); + console.log(message.vali_skandha_config_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_config endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: '', // without method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_config_3); + console.log(message.fail_skandha_config_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_config_3); + console.log(message.vali_skandha_config_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_config_3); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js b/test/specs/testnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js new file mode 100644 index 0000000..b634c7b --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/skandha/skandha_feeHistory.spec.js @@ -0,0 +1,301 @@ +import * as dotenv from 'dotenv'; 
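// skandha_feeHistory is called with params of [data.entryPointAddress, '10', 'latest']
// (entry point, block count and newest block, following the eth_feeHistory convention)
// and, per the assertions below, returns actualGasPrice, maxFeePerGas and
// maxPriorityFeePerGas under result. The constant below only illustrates that request
// body (exampleFeeHistoryRequest is an assumed name and is not used by the spec).
const exampleFeeHistoryRequest = {
  method: 'skandha_feeHistory',
  id: 3,
  params: [data.entryPointAddress, '10', 'latest'], // entryPoint, blockCount, newestBlock
};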
+dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the fee history endpoint of the skandha', function () { + it( + 'SMOKE: Validate the skandha_feeHistory endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + // Add assertions + try { + assert.isNumber(response.data.id, message.vali_skandha_feeHistory_id); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.actualGasPrice, + message.vali_skandha_feeHistory_actualGasPrice + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxFeePerGas, + message.vali_skandha_feeHistory_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxPriorityFeePerGas, + message.vali_skandha_feeHistory_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_1); + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'Skandha_FeeHistory', // invalid method name + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_2); + console.log(message.fail_skandha_feeHistory_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_2); + console.log(message.vali_skandha_feeHistory_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with incorrect method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_getGas', // incorrect method name + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_3); + console.log(message.fail_skandha_feeHistory_3); + 
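+        // reaching this point means the bundler accepted the unknown method; the expected path
+        // is the catch below, which passes only when the JSON-RPC error text contains
+        // constant.skandha_error_1. Note that axios keeps the HTTP response body under
+        // e.response.data, so the exact property path to the error message depends on how the
+        // bundler nests its error object.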
} catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_3); + console.log(message.vali_skandha_feeHistory_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: '', // without method name + id: 3, + params: [data.entryPointAddress, '10', 'latest'], + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_3); + console.log(message.fail_skandha_feeHistory_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_3); + console.log(message.vali_skandha_feeHistory_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with invalid entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: [data.invalidEntryPointAddress, '10', 'latest'], // invalid entry point address + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_5); + console.log(message.fail_skandha_feeHistory_5); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_5); + console.log(message.vali_skandha_feeHistory_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha with incorrect entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: [data.incorrectentryPointAddress, '10', 'latest'], // incorrect entry point address + }, + header + ); + + addContext(test, message.fail_skandha_feeHistory_6); + console.log(message.fail_skandha_feeHistory_6); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_6); + console.log(message.vali_skandha_feeHistory_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_feeHistory endpoint of the skandha without entry point address on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_feeHistory', + id: 3, + params: ['10', 'latest'], // without entry point address + }, + header + ); + + 
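+        // The catch blocks in this file all reduce to "does the RPC error text contain
+        // constant.skandha_error_1"; a shared accessor like the sketch below, hoisted to module
+        // scope, would guard against responses that lack the expected nesting (e.response.data
+        // is where axios puts the body; the flatter path mirrors what these specs currently read).
+        const extractRpcError = (e) =>
+          e?.response?.data?.error?.message ?? e?.response?.error?.message ?? String(e);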
addContext(test, message.fail_skandha_feeHistory_7); + console.log(message.fail_skandha_feeHistory_7); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_feeHistory_7); + console.log(message.vali_skandha_feeHistory_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_feeHistory_7); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js b/test/specs/testnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js new file mode 100644 index 0000000..e4498e8 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/skandha/skandha_getGasPrice.spec.js @@ -0,0 +1,180 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the get gas price endpoint of the skandha', function () { + it( + 'SMOKE: Validate the skandha_getGasPrice endpoint of the skandha with valid details on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_getGasPrice', + id: 3, + }, + header + ); + + // Add assertions + try { + assert.isNumber( + response.data.id, + message.vali_skandha_getGasPrice_id + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxPriorityFeePerGas, + message.vali_skandha_getGasPrice_maxPriorityFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + response.data.result.maxFeePerGas, + message.vali_skandha_getGasPrice_maxFeePerGas + ); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_1); + } + } + ); + + it( + 'REGRESSION: Validate the skandha_getGasPrice endpoint of the skandha with invalid method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'Skandha_GetGasPrice', // invalid method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_getGasPrice_2); + console.log(message.fail_skandha_getGasPrice_2); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getGasPrice_2); + console.log(message.vali_skandha_getGasPrice_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_getGasPrice endpoint of the skandha with incorrect 
method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: 'skandha_getGas', // incorrect method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_getGasPrice_3); + console.log(message.fail_skandha_getGasPrice_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getGasPrice_3); + console.log(message.vali_skandha_getGasPrice_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the skandha_getGasPrice endpoint of the skandha without method name on the ' + + randomChainName + + ' Network', + async function () { + var test = this; + + try { + const response = await axios.post( + `https://${randomChainName}-bundler.etherspot.io/`, + { + method: '', // without method name + id: 3, + }, + header + ); + + addContext(test, message.fail_skandha_getGasPrice_3); + console.log(message.fail_skandha_getGasPrice_3); + } catch (e) { + const error = e.response.error.message; + + if (error.includes(constant.skandha_error_1)) { + addContext(test, message.vali_skandha_getGasPrice_3); + console.log(message.vali_skandha_getGasPrice_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_skandha_getGasPrice_3); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js new file mode 100644 index 0000000..cd70090 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/addPolicy.spec.js @@ -0,0 +1,878 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the add sponsorship policy api of Arka', function () { + it( + 'SMOKE: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + 
description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${`${data.arka_fqdn}/${data.arka_addPolicy}`}`, + requestData, + { + headers, + } + ); + + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.invalid_sponsorAddress, // invalid wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_2); + assert.fail(message.vali_addPolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_2, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.incorrect_sponsorAddress, // incorrect wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_3); + 
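+        // The same policy payload is rebuilt in every test in this file (and again in the
+        // deletePolicy/disablePolicy/enablePolicy preconditions); a factory like the sketch
+        // below, hoisted to module scope, would keep the required fields in one place. The
+        // field names mirror the payloads above; `overrides` is the only new name.
+        const buildPolicyPayload = (overrides = {}) => ({
+          walletAddress: data.sponsorAddress,
+          name: randomName,
+          description: randomDescription,
+          isPublic: true,
+          isEnabled: true,
+          isApplicableToAllNetworks: true,
+          enabledChains: [randomChainId],
+          supportedEPVersions: ['EPV_06', 'EPV_07'],
+          isPerpetual: true,
+          ...overrides,
+        });
+        // e.g. buildPolicyPayload({ walletAddress: data.invalid_sponsorAddress })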
assert.fail(message.vali_addPolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_3, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + name: randomName, // without wallet address + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_4); + assert.fail(message.vali_addPolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_4, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with wallet address as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: '', // empty string + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_5); + assert.fail(message.vali_addPolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with wallet address as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: ' ', // blank spaces + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_6); + assert.fail(message.vali_addPolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_6, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without name on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + description: randomDescription, // without name + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, 
message.vali_addPolicy_7); + assert.fail(message.vali_addPolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with name as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: '', // empty string + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_8); + assert.fail(message.vali_addPolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_addPolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with name as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: ' ', // blank spaces + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_9); + assert.fail(message.vali_addPolicy_9); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without description on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, // without description + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_10); + assert.fail(message.vali_addPolicy_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_3, + message.fail_addPolicy_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with description as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: '', // empty string + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await 
axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_11); + assert.fail(message.vali_addPolicy_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_3, + message.fail_addPolicy_11, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with description as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: ' ', // blank spaces + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_12); + assert.fail(message.vali_addPolicy_12); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_addPolicy_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + isPerpetual: true, // without EPVersion + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_13); + assert.fail(message.vali_addPolicy_13); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_4, + message.fail_addPolicy_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with invalid EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EP_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_14); + assert.fail(message.vali_addPolicy_14); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_5, + message.fail_addPolicy_14, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with EPVersion empty array on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: [], + 
isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_addPolicy_15); + assert.fail(message.vali_addPolicy_15); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_5, + message.fail_addPolicy_15, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + invalid_headers, + } + ); + + addContext(test, message.vali_addPolicy_16); + assert.fail(message.vali_addPolicy_16); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_addPolicy_16, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_addPolicy_17); + assert.fail(message.vali_addPolicy_17); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_addPolicy_17, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the add policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_addPolicy_18); + assert.fail(message.vali_addPolicy_18); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_addPolicy_18, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js new file mode 100644 index 0000000..68cf898 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/deletePolicy.spec.js @@ -0,0 +1,418 @@ +import * as dotenv from 
'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the delete sponsorship policy api of Arka', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the 
delete policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${data.invalid_newId}`, + { + headers, + } + ); + + addContext(test, message.vali_deletePolicy_2); + assert.fail(message.vali_deletePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${data.incorrect_newId}`, + { + headers, + } + ); + + addContext(test, message.vali_deletePolicy_3); + assert.fail(message.vali_deletePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete(`${data.arka_deletePolicy}`, { + headers, + }); + + addContext(test, message.vali_deletePolicy_4); + assert.fail(message.vali_deletePolicy_4); + } catch (e) { + let error = e.message; + if (error.includes(constant.sponsorshipPolicy_walletAddress_12)) { + addContext(test, message.vali_deletePolicy_1); + console.log(message.vali_deletePolicy_1); + } else { + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deletePolicy_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with zero value id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${data.zero_newId}`, + { + headers, + } + ); + + addContext(test, message.vali_deletePolicy_5); + assert.fail(message.vali_deletePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/-${newId}`, + { + headers, + } + ); + + addContext(test, message.vali_deletePolicy_6); + assert.fail(message.vali_deletePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_deletePolicy_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_deletePolicy_7); + assert.fail(message.vali_deletePolicy_7); + } catch (e) { + handleErrorValidation( + test, 
+ e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_deletePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_deletePolicy_8); + assert.fail(message.vali_deletePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_deletePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the delete policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + await axios.put( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_deletePolicy_9); + assert.fail(message.vali_deletePolicy_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_deletePolicy_9, + 400 + ); + } + } + ); + + it( + 'SMOKE: Validate the delete policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.delete( + `${data.arka_fqdn}/${data.arka_deletePolicy}/${newId}`, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.message, + `Successfully deleted policy with id ${newId}`, + message.vali_deletePolicy_message + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deletePolicy_1); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js new file mode 100644 index 0000000..6d52c58 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/disablePolicy.spec.js @@ -0,0 +1,457 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 
'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the disable sponsorship policy api of Arka', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${data.invalid_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_2); + assert.fail(message.vali_disablePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${data.incorrect_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_3); + assert.fail(message.vali_disablePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + 
constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_4); + assert.fail(message.vali_disablePolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_disablePolicy_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with zero id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${data.zero_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_5); + assert.fail(message.vali_disablePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/-${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_6); + assert.fail(message.vali_disablePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_disablePolicy_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_disablePolicy_7); + assert.fail(message.vali_disablePolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_disablePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_disablePolicy_8); + assert.fail(message.vali_disablePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_disablePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_disablePolicy_9); + assert.fail(message.vali_disablePolicy_9); + } catch (e) { + 
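+        // axios.put is (url, body, config); in the apikey variants above the headers object is
+        // passed as the second argument, so axios treats it as the request body and the custom
+        // header set is never applied. A call that exercises the intended headers would look
+        // like the sketch below (kept as a comment so the recorded behaviour is unchanged):
+        //   await axios.put(`${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, null, { headers: withoutapikey_headers });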
handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_disablePolicy_9, + 400 + ); + } + } + ); + + it( + 'SMOKE: Validate the disable policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.message, + `Successfully disabled policy with id ${newId}`, + message.vali_disablePolicy_message + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_disablePolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the disable policy endpoint of Arka with already disabled policy on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_disablePolicy}/${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_disablePolicy_10); + assert.fail(message.vali_disablePolicy_10); + } catch (e) { + let error = e.response.data.error; + if (error.includes(constant.sponsorshipPolicy_walletAddress_14)) { + // validate the HTTP status code is 500 + expect(e.response.status).to.equal(500); + } else { + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_disablePolicy_10); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js new file mode 100644 index 0000000..4343740 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/enablePolicy.spec.js @@ -0,0 +1,457 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate the enable sponsorship policy api of Arka', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on 
the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${data.invalid_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_2); + assert.fail(message.vali_enablePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${data.incorrect_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_3); + assert.fail(message.vali_enablePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + 
`${data.arka_fqdn}/${data.arka_enablePolicy}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_4); + assert.fail(message.vali_enablePolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_enablePolicy_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with zero id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${data.zero_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_5); + assert.fail(message.vali_enablePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/-${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_6); + assert.fail(message.vali_enablePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_enablePolicy_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_enablePolicy_7); + assert.fail(message.vali_enablePolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_enablePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_enablePolicy_8); + assert.fail(message.vali_enablePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_enablePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_enablePolicy_9); + assert.fail(message.vali_enablePolicy_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_enablePolicy_9, + 400 + ); + } + } + ); + + it( + 'SMOKE: Validate the enable policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send DELETE request with headers + const response = await axios.put( + 
`${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.message, + `Successfully enabled policy with id ${newId}`, + message.vali_enablePolicy_message + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_enablePolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the enable policy endpoint of Arka with already enabled policy on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send PUT request with headers + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_enablePolicy}/${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_enablePolicy_10); + assert.fail(message.vali_enablePolicy_10); + } catch (e) { + let error = e.response.data.error; + if (error.includes(constant.sponsorshipPolicy_walletAddress_15)) { + // validate the HTTP status code is 500 + expect(e.response.status).to.equal(500); + } else { + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_enablePolicy_10); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js new file mode 100644 index 0000000..a738468 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddress.spec.js @@ -0,0 +1,288 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + responseBody.walletAddress, + 
message.vali_latestPolicyWalletAddress_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddress_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddress_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddress_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_latestPolicyWalletAddress_1); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_2); + assert.fail(message.vali_latestPolicyWalletAddress_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddress_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_3); + assert.fail(message.vali_latestPolicyWalletAddress_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddress_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_4); + assert.fail(message.vali_latestPolicyWalletAddress_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddress_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_5); + 
assert.fail(message.vali_latestPolicyWalletAddress_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddress_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_6); + assert.fail(message.vali_latestPolicyWalletAddress_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddress_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/latest`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddress_7); + assert.fail(message.vali_latestPolicyWalletAddress_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddress_7, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js new file mode 100644 index 0000000..a62d60e --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressChainid.spec.js @@ -0,0 +1,413 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address and chainid', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address and chain id endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await 
axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + responseBody.walletAddress, + message.vali_latestPolicyWalletAddressandChainid_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddressandChainid_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddressandChainid_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddressandChainid_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_latestPolicyWalletAddressandChainid_1); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_2); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_3); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_4); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_4); + } catch (e) { + handleErrorValidation( + test, + e, + 
constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressandChainid_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomInvalidChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_5); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with incorrect chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomIncorrectChainId}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_6); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_7); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressandChainid_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without wallet address and chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_8); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandChainid_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_9); + 
assert.fail(message.vali_latestPolicyWalletAddressandChainid_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandChainid_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_10); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandChainid_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and chainid endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/chain-id/${randomChainId}/latest`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandChainid_11); + assert.fail(message.vali_latestPolicyWalletAddressandChainid_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandChainid_11, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js new file mode 100644 index 0000000..9594397 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersion.spec.js @@ -0,0 +1,408 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address and ep version', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka on the ' + + randomChainName + + ' network', + 
async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + responseBody.walletAddress, + message.vali_latestPolicyWalletAddressandEPVersion_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddressandEPVersion_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddressandEPVersion_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddressandEPVersion_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_latestPolicyWalletAddressandEPVersion_1); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_2); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandEPVersion_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_3); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandEPVersion_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}/latest`, + null, + { + headers, + } + ); + + addContext(test, 
message.vali_latestPolicyWalletAddressandEPVersion_4); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressandEPVersion_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with invalid entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_5); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressandEPVersion_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_6); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressandEPVersion_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_7); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressandEPVersion_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_8); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressandEPVersion_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async 
function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_9); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandEPVersion_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_10); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandEPVersion_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address and entry point version endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/latest`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_latestPolicyWalletAddressandEPVersion_11); + assert.fail(message.vali_latestPolicyWalletAddressandEPVersion_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressandEPVersion_11, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js new file mode 100644 index 0000000..349758a --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getLatestPolicyWalletAddressEPVersionChainid.spec.js @@ -0,0 +1,570 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + 
apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the latest sponsorship policy api using wallet address, ep version, and chainid', function () { + it( + 'SMOKE: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.isNotEmpty( + responseBody.walletAddress, + message.vali_latestPolicyWalletAddressEPVersionandChainid_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.name, + message.vali_latestPolicyWalletAddressEPVersionandChainid_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody.description, + message.vali_latestPolicyWalletAddressEPVersionandChainid_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody.id, + message.vali_latestPolicyWalletAddressEPVersionandChainid_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail( + message.fail_latestPolicyWalletAddressEPVersionandChainid_1 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_2 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_2 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_2, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_3 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_3 
+ ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_3, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_4 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_4 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressEPVersionandChainid_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_5 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_5 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressEPVersionandChainid_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_6 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_6 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_latestPolicyWalletAddressEPVersionandChainid_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/chain-id/${randomChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_7 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_7 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_latestPolicyWalletAddressEPVersionandChainid_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: 
Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomInvalidChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_8 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_8 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomIncorrectChainId}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_9 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_9 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_9, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_10 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_10 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.invalid_data, + message.fail_latestPolicyWalletAddressEPVersionandChainid_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/latest`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_11 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_11 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_latestPolicyWalletAddressEPVersionandChainid_11, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + 
`${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + { + invalid_headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_12 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_12 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressEPVersionandChainid_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + { + incorrect_headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_13 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_13 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressEPVersionandChainid_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the latest policy of particular wallet address, entry point version and chain id endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}/latest`, + { + withoutapikey_headers, + } + ); + + addContext( + test, + message.vali_latestPolicyWalletAddressEPVersionandChainid_14 + ); + assert.fail( + message.vali_latestPolicyWalletAddressEPVersionandChainid_14 + ); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_latestPolicyWalletAddressEPVersionandChainid_14, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js new file mode 100644 index 0000000..66cd449 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicy.spec.js @@ -0,0 +1,185 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers 
without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate records of the sponsorship policy api', function () { + it( + 'SMOKE: Validate the policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policy}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty(responseBody[i].name, message.vali_policy_name); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody[i].id, message.vali_policy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policy_1); + } + } + ); + + it( + 'REGRESSION: Validate the policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, { + invalid_headers, + }); + + addContext(test, message.vali_policy_2); + assert.fail(message.vali_policy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policy_2, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, { + incorrect_headers, + }); + + addContext(test, message.vali_policy_3); + assert.fail(message.vali_policy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policy_3, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, { + withoutapikey_headers, + }); + + addContext(test, message.vali_policy_4); + assert.fail(message.vali_policy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policy_4, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js new file mode 100644 index 0000000..eb8c3bd --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyId.spec.js @@ -0,0 +1,448 @@ +import * as dotenv 
from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy api using id', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'SMOKE: Fetching 
the policy of particular id endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${newId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_policyId_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_policyId_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_policyId_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal(responseBody.id, newId, message.vali_policyId_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyId_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${data.invalid_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_2); + assert.fail(message.vali_policyId_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${data.incorrect_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_3); + assert.fail(message.vali_policyId_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}`, null, { + headers, + }); + + addContext(test, message.vali_policyId_4); + assert.fail(message.vali_policyId_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with zero id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/${data.zero_newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_5); + 
assert.fail(message.vali_policyId_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_5, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with negative id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policy}/-${newId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyId_6); + assert.fail(message.vali_policyId_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyId_6, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}/${newId}`, { + invalid_headers, + }); + + addContext(test, message.vali_policyId_7); + assert.fail(message.vali_policyId_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyId_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}/${newId}`, { + incorrect_headers, + }); + + addContext(test, message.vali_policyId_8); + assert.fail(message.vali_policyId_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyId_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular id endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_fqdn}/${data.arka_policy}/${newId}`, { + withoutapikey_headers, + }); + + addContext(test, message.vali_policyId_9); + assert.fail(message.vali_policyId_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyId_9, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js new file mode 100644 index 0000000..024dcc0 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddress.spec.js @@ -0,0 +1,286 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const 
invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy api using walet address', function () { + it( + 'SMOKE: Fetching the policy of particular wallet address endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policyWalletAddress_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].name, + message.vali_policyWalletAddress_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policyWalletAddress_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody[i].id, + message.vali_policyWalletAddress_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyWalletAddress_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_2); + assert.fail(message.vali_policyWalletAddress_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddress_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_3); + assert.fail(message.vali_policyWalletAddress_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddress_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + 
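+ // the request below omits the wallet address path segment and is expected to be rejected with a 400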
async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_policyWalletAddress}`, null, { + headers, + }); + + addContext(test, message.vali_policyWalletAddress_4); + assert.fail(message.vali_policyWalletAddress_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_policyWalletAddress_4, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_5); + assert.fail(message.vali_policyWalletAddress_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddress_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_6); + assert.fail(message.vali_policyWalletAddress_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddress_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_policyWalletAddress_7); + assert.fail(message.vali_policyWalletAddress_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddress_7, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js new file mode 100644 index 0000000..a98c9ed --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersion.spec.js @@ -0,0 +1,406 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { randomChainName } from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + 
apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy api using walet address and ep version', function () { + it( + 'SMOKE: Fetching the policy of particular wallet address and entry point version endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policyWalletAddressandEPVersion_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].name, + message.vali_policyWalletAddressandEPVersion_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policyWalletAddressandEPVersion_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody[i].id, + message.vali_policyWalletAddressandEPVersion_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyWalletAddressandEPVersion_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_2); + assert.fail(message.vali_policyWalletAddressandEPVersion_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressandEPVersion_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_3); + assert.fail(message.vali_policyWalletAddressandEPVersion_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + 
message.fail_policyWalletAddressandEPVersion_3, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_4); + assert.fail(message.vali_policyWalletAddressandEPVersion_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressandEPVersion_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with invalid entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_5); + assert.fail(message.vali_policyWalletAddressandEPVersion_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressandEPVersion_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_6); + assert.fail(message.vali_policyWalletAddressandEPVersion_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressandEPVersion_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_7); + assert.fail(message.vali_policyWalletAddressandEPVersion_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressandEPVersion_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_policyWalletAddress}`, null, { + headers, + }); + + addContext(test, message.vali_policyWalletAddressandEPVersion_8); + assert.fail(message.vali_policyWalletAddressandEPVersion_8); + } catch (e) { + handleErrorValidation( + test, + e, + 
constant.sponsorshipPolicy_walletAddress_7, + message.fail_policyWalletAddressandEPVersion_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + { + invalid_headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_9); + assert.fail(message.vali_policyWalletAddressandEPVersion_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressandEPVersion_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_10); + assert.fail(message.vali_policyWalletAddressandEPVersion_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressandEPVersion_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address and entry point version endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}`, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_policyWalletAddressandEPVersion_11); + assert.fail(message.vali_policyWalletAddressandEPVersion_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressandEPVersion_11, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js new file mode 100644 index 0000000..8f48aff --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/getPolicyWalletAddressEPVersionChainid.spec.js @@ -0,0 +1,516 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { handleErrorValidation } from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, + randomInvalidChainId, + randomIncorrectChainId, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 
'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 'application/json', +}; + +describe('Validate record of the sponsorship policy api using walet address, ep version, and chainid', function () { + it( + 'SMOKE: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + const response = await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + for (let i = 0; i < responseBody.length; i++) { + try { + assert.isNotEmpty( + responseBody[i].walletAddress, + message.vali_policyWalletAddressEPVersionandChainid_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].name, + message.vali_policyWalletAddressEPVersionandChainid_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNotEmpty( + responseBody[i].description, + message.vali_policyWalletAddressEPVersionandChainid_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber( + responseBody[i].id, + message.vali_policyWalletAddressEPVersionandChainid_id + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_policyWalletAddressEPVersionandChainid_1); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.invalid_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_2); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressEPVersionandChainid_2, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.incorrect_sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, 
message.vali_policyWalletAddressEPVersionandChainid_3); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressandEPVersion_11, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_4); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressEPVersionandChainid_4, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.invalid_epversion}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_5); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressEPVersionandChainid_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.incorrect_epversion}/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_6); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_11, + message.fail_policyWalletAddressEPVersionandChainid_6, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/chain-id/${randomChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_7); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressEPVersionandChainid_7, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and 
chain id endpoint of Arka with invalid chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomInvalidChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_8); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressEPVersionandChainid_8, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomIncorrectChainId}`, + null, + { + headers, + } + ); + + addContext(test, message.vali_policyWalletAddressEPVersionandChainid_9); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_policyWalletAddressEPVersionandChainid_9, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without chainid on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id`, + null, + { + headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_10 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_9, + message.fail_policyWalletAddressEPVersionandChainid_10, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without wallet address and entry point version on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get(`${data.arka_policyWalletAddress}`, null, { + headers, + }); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_11 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_policyWalletAddressEPVersionandChainid_11, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + { + invalid_headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_12 + ); + 
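+ // a 200 response with an invalid apikey would mean the key validation was bypassed, so fail the test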
assert.fail(message.vali_policyWalletAddressEPVersionandChainid_12); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressEPVersionandChainid_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + { + incorrect_headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_13 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_13); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressEPVersionandChainid_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Fetching the policy of particular wallet address, entry point version and chain id endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // send GET request with headers + await axios.get( + `${data.arka_fqdn}/${data.arka_policyWalletAddress}/${data.sponsorAddress}/ep-version/${data.ep07}/chain-id/${randomChainId}`, + { + withoutapikey_headers, + } + ); + + addContext( + test, + message.vali_policyWalletAddressEPVersionandChainid_14 + ); + assert.fail(message.vali_policyWalletAddressEPVersionandChainid_14); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_policyWalletAddressEPVersionandChainid_14, + 400 + ); + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js new file mode 100644 index 0000000..13a81f0 --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/sponsorshipPolicy/updatePolicy.spec.js @@ -0,0 +1,1786 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { expect, assert } from 'chai'; +import axios from 'axios'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + generateRandomString, + handleErrorValidation, +} from '../../../../utils/baseTest.js'; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +const randomName = generateRandomString(15); +const randomDescription = generateRandomString(15); +const updatedRandomName = generateRandomString(15); +const updatedRandomDescription = generateRandomString(15); + +// define headers with valid details +const headers = { + 'Content-Type': 'application/json', + apikey: process.env.API_KEY_ARKA, +}; + +// define headers with invalid details +const invalid_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INVALID_API_KEY_ARKA, +}; + +// define headers with incorrect details +const incorrect_headers = { + 'Content-Type': 'application/json', + apikey: process.env.INCORRECT_API_KEY_ARKA, +}; + +// define headers without apikey details +const withoutapikey_headers = { + 'Content-Type': 
'application/json', +}; + +describe('Validate record of the sponsorship policy api using walet address', function () { + let newId; + + it( + 'PRECONDITION: Validate the add policy endpoint of Arka on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: false, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: 5000, + globalMaximumNative: 1000, + globalMaximumOpCount: 1000, + perUserMaximumApplicable: true, + perUserMaximumUsd: 100, + perUserMaximumNative: 200, + perUserMaximumOpCount: 50, + perOpMaximumApplicable: true, + perOpMaximumUsd: 10, + perOpMaximumNative: 20, + }; + + // send POST request with headers and data + const response = await axios.post( + `${data.arka_fqdn}/${data.arka_addPolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + newId = responseBody.id; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_addPolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + randomName, + message.vali_addPolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + randomDescription, + message.vali_addPolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNumber(responseBody.id, message.vali_addPolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addPolicy_1); + } + } + ); + + it( + 'SMOKE: Validate the update policy endpoint of Arka on the ' + + randomChainName + + ' network: case 1', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + globalMaximumApplicable: false, + globalMaximumUsd: data.globalMaximumUsd, + globalMaximumNative: data.globalMaximumNative, + globalMaximumOpCount: data.globalMaximumOpCount, + perUserMaximumApplicable: false, + perUserMaximumUsd: data.perUserMaximumUsd, + perUserMaximumNative: data.perUserMaximumNative, + perUserMaximumOpCount: data.perUserMaximumOpCount, + perOpMaximumApplicable: false, + perOpMaximumUsd: data.perOpMaximumUsd, + perOpMaximumNative: data.perOpMaximumUsd, + }; + + // send POST request with headers and data + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + 
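+ // each field of the updated policy is asserted in its own try/catch so a single mismatch is logged without skipping the remaining checks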
assert.equal(responseBody.id, newId, message.vali_updatePolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_updatePolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + updatedRandomName, + message.vali_updatePolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + updatedRandomDescription, + message.vali_updatePolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + // ********************** + + try { + assert.isTrue( + responseBody.isPublic, + message.vali_updatePolicy_isPublic + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.isEnabled, + message.vali_updatePolicy_isEnabled + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isApplicableToAllNetworks, + message.vali_updatePolicy_isApplicableToAllNetworks + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.enabledChains[0], + randomChainId, + message.vali_updatePolicy_enabledChains + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.supportedEPVersions[0], + data.ep07, + message.vali_updatePolicy_supportedEPVersions + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isPerpetual, + message.vali_updatePolicy_isPerpetual + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.globalMaximumApplicable, + message.vali_updatePolicy_globalMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.globalMaximumUsd, + message.vali_updatePolicy_globalMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.globalMaximumNative, + message.vali_updatePolicy_globalMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.globalMaximumOpCount, + message.vali_updatePolicy_globalMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.perUserMaximumApplicable, + message.vali_updatePolicy_perUserMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + 
addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perUserMaximumUsd, + message.vali_updatePolicy_perUserMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perUserMaximumNative, + message.vali_updatePolicy_perUserMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perUserMaximumOpCount, + message.vali_updatePolicy_perUserMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.perOpMaximumApplicable, + message.vali_updatePolicy_perOpMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perOpMaximumUsd, + message.vali_updatePolicy_perOpMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.perOpMaximumNative, + message.vali_updatePolicy_perOpMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressAllowList, + message.vali_updatePolicy_addressAllowList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressBlockList, + message.vali_updatePolicy_addressBlockList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_updatePolicy_1); + } + } + ); + + it( + 'SMOKE: Validate the update policy endpoint of Arka on the ' + + randomChainName + + ' network: case 2', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + globalMaximumApplicable: true, + globalMaximumUsd: data.globalMaximumUsd, + globalMaximumNative: data.globalMaximumNative, + globalMaximumOpCount: data.globalMaximumOpCount, + perUserMaximumApplicable: true, + perUserMaximumUsd: data.perUserMaximumUsd, + perUserMaximumNative: data.perUserMaximumNative, + perUserMaximumOpCount: data.perUserMaximumOpCount, + perOpMaximumApplicable: true, + perOpMaximumUsd: data.perOpMaximumUsd, + perOpMaximumNative: data.perOpMaximumUsd, + }; + + // send POST request with headers and data + const response = await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + const responseBody = response.data; + + // perform assertions + expect(response.status).to.equal(200); + + try { + assert.equal(responseBody.id, newId, message.vali_updatePolicy_id); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try 
{ + assert.equal( + responseBody.walletAddress, + data.sponsorAddress, + message.vali_updatePolicy_walletAddress + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.name, + updatedRandomName, + message.vali_updatePolicy_name + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.description, + updatedRandomDescription, + message.vali_updatePolicy_description + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isPublic, + message.vali_updatePolicy_isPublic + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isFalse( + responseBody.isEnabled, + message.vali_updatePolicy_isEnabled + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isApplicableToAllNetworks, + message.vali_updatePolicy_isApplicableToAllNetworks + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.enabledChains[0], + randomChainId, + message.vali_updatePolicy_enabledChains + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.supportedEPVersions[0], + data.ep07, + message.vali_updatePolicy_supportedEPVersions + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.isPerpetual, + message.vali_updatePolicy_isPerpetual + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.globalMaximumApplicable, + message.vali_updatePolicy_globalMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.globalMaximumUsd, + data.globalMaximumUsd, + message.vali_updatePolicy_globalMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.globalMaximumNative, + data.globalMaximumNative, + message.vali_updatePolicy_globalMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.globalMaximumOpCount, + data.globalMaximumOpCount, + message.vali_updatePolicy_globalMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.perUserMaximumApplicable, + message.vali_updatePolicy_perUserMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perUserMaximumUsd, + data.perUserMaximumUsd, + 
message.vali_updatePolicy_perUserMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perUserMaximumNative, + data.perUserMaximumNative, + message.vali_updatePolicy_perUserMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perUserMaximumOpCount, + data.perUserMaximumOpCount, + message.vali_updatePolicy_perUserMaximumOpCount + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isTrue( + responseBody.perOpMaximumApplicable, + message.vali_updatePolicy_perOpMaximumApplicable + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perOpMaximumUsd, + data.perOpMaximumUsd, + message.vali_updatePolicy_perOpMaximumUsd + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.equal( + responseBody.perOpMaximumNative, + data.perOpMaximumNative, + message.vali_updatePolicy_perOpMaximumNative + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressAllowList, + message.vali_updatePolicy_addressAllowList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + + try { + assert.isNull( + responseBody.addressBlockList, + message.vali_updatePolicy_addressBlockList + ); + } catch (e) { + addContext(test, e); + console.error(e); + const eString = e.toString(); + addContext(test, eString); + } + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_updatePolicy_1); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.invalid_sponsorAddress, // invalid wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_2); + assert.fail(message.vali_updatePolicy_2); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_updatePolicy_2, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with incorrect wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.incorrect_sponsorAddress, // incorrect wallet address + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + 
supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_3); + assert.fail(message.vali_updatePolicy_3); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_updatePolicy_3, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without wallet address on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + name: randomName, // without wallet address + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_4); + assert.fail(message.vali_updatePolicy_4); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_updatePolicy_4, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with wallet address as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: '', // empty string + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_5); + assert.fail(message.vali_updatePolicy_5); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_2, + message.fail_updatePolicy_5, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with wallet address as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: ' ', // blank spaces + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_6); + assert.fail(message.vali_updatePolicy_6); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_1, + message.fail_updatePolicy_6, + 403 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without name on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + description: 
randomDescription, // without name + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_7); + assert.fail(message.vali_updatePolicy_7); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_7, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with name as a empty string on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: '', // empty string + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_8); + assert.fail(message.vali_updatePolicy_8); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_8, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with name as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: ' ', // blank spaces + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_9); + assert.fail(message.vali_updatePolicy_9); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_9, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without description on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, // without description + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_10); + assert.fail(message.vali_updatePolicy_10); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_10, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with description as a empty string on the ' + + randomChainName + + ' network', + async 
function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: '', // empty string + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_11); + assert.fail(message.vali_updatePolicy_11); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_11, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with description as a only blank spaces on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: ' ', // blank spaces + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_12); + assert.fail(message.vali_updatePolicy_12); + } catch (e) { + // TO DO: Update the constant message + // TO DO: Update the status code + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_12, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + isPerpetual: true, // without EPVersion + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_13); + assert.fail(message.vali_updatePolicy_13); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_13, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid EPVersion on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EP_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_14); + assert.fail(message.vali_updatePolicy_14); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, 
+ message.fail_updatePolicy_14, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with EPVersion empty array on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: [], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_15); + assert.fail(message.vali_updatePolicy_15); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_10, + message.fail_updatePolicy_15, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: data.invalid_newId, // invalid id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_16); + assert.fail(message.vali_updatePolicy_16); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_updatePolicy_16, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with incorrect id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: data.incorrect_newId, // incorrect id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_17); + assert.fail(message.vali_updatePolicy_17); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_updatePolicy_17, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_18); + 
assert.fail(message.vali_updatePolicy_18); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_updatePolicy_18, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with zero value id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: data.zero_newId, // zero value id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send PUT request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_19); + assert.fail(message.vali_updatePolicy_19); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_7, + message.fail_updatePolicy_19, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with negative value id on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: `-${newId}`, // negative id + walletAddress: data.sponsorAddress, + name: randomName, + description: randomDescription, + isPublic: true, + isEnabled: true, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_06', 'EPV_07'], + isPerpetual: true, + }; + + // send PUT request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers, + } + ); + + addContext(test, message.vali_updatePolicy_20); + assert.fail(message.vali_updatePolicy_20); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_6, + message.fail_updatePolicy_20, + 404 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with invalid apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + }; + + // send PUT request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + headers: invalid_headers, + } + ); + + addContext(test, message.vali_updatePolicy_21); + assert.fail(message.vali_updatePolicy_21); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_21, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka with incorrect apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + }; + + // send PUT
request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + incorrect_headers, + } + ); + + addContext(test, message.vali_updatePolicy_22); + assert.fail(message.vali_updatePolicy_22); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_22, + 400 + ); + } + } + ); + + it( + 'REGRESSION: Validate the update policy endpoint of Arka without apikey on the ' + + randomChainName + + ' network', + async function () { + var test = this; + try { + // define the payload + const requestData = { + id: newId, + walletAddress: data.sponsorAddress, + name: updatedRandomName, + description: updatedRandomDescription, + isPublic: true, + isEnabled: false, + isApplicableToAllNetworks: true, + enabledChains: [randomChainId], + supportedEPVersions: ['EPV_07'], + isPerpetual: true, + }; + + // send POST request with headers and data + await axios.put( + `${data.arka_fqdn}/${data.arka_updatePolicy}`, + requestData, + { + withoutapikey_headers, + } + ); + + addContext(test, message.vali_updatePolicy_23); + assert.fail(message.vali_updatePolicy_23); + } catch (e) { + handleErrorValidation( + test, + e, + constant.sponsorshipPolicy_walletAddress_13, + message.fail_updatePolicy_23, + 400 + ); + } + } + ); +}); diff --git a/test/utils/baseTest.js b/test/utils/baseTest.js index 2e13215..ef2dab2 100644 --- a/test/utils/baseTest.js +++ b/test/utils/baseTest.js @@ -1,4 +1,5 @@ -import helper from './helper.js'; +import addContext from 'mochawesome/addContext.js'; +import { expect, assert } from 'chai'; function customRetryAsync(fn, maxRetries) { return new Promise((resolve, reject) => { @@ -26,4 +27,61 @@ function customRetryAsync(fn, maxRetries) { }); } -export default customRetryAsync; +function getRandomNetwork(arr) { + const randomIndex = Math.floor(Math.random() * arr.length); + return randomIndex; +} + +function getEpochTimeInSeconds() { + const currentTime = Math.floor(new Date().getTime() / 1000); + return currentTime; +} + +function generateRandomString(length) { + const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'; + let result = ''; + for (let i = 0; i < length; i++) { + result += characters.charAt(Math.floor(Math.random() * characters.length)); + } + return result; +} + +function handleErrorValidation( + test, + errorResponse, + constantMessage, + validationMessage, + responseCode +) { + try { + let error = errorResponse.response.data.error; + + // Validate the error message and HTTP status code + if (error.includes(constantMessage)) { + expect(errorResponse.response.status).to.equal(responseCode); + + expect(errorResponse.response.data).to.have.property( + 'error', + constantMessage + ); + } else { + // Add error context and fail the test + const errorString = errorResponse.toString(); + addContext(test, errorString); + assert.fail(validationMessage); + } + } catch (err) { + // Handle unexpected errors + const errorString = err.toString(); + addContext(test, errorString); + assert.fail('Unexpected error occurred.'); + } +} + +export { + customRetryAsync, + getRandomNetwork, + getEpochTimeInSeconds, + generateRandomString, + handleErrorValidation, +}; diff --git a/test/utils/sharedData_mainnet.js b/test/utils/sharedData_mainnet.js index a8584fc..7adc2e1 100644 --- a/test/utils/sharedData_mainnet.js +++ b/test/utils/sharedData_mainnet.js @@ -21,27 +21,27 @@ const chainConfigs = { invalidProviderNetwork: 'http://rpc.etherspot.io/gnosis', 
otherProviderNetwork: 'https://rpc.etherspot.io/polygon', }, - 137: { - name: 'polygon', - invalidId: '110 ', - incorrectId: '1730', - toChainId: '100', - toTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83', - toTokenName: 'USDC', - incorrectToTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc19B60fb7A83', - invalidToTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A8', - tokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174', - tokenName: 'USDC', - incorrectTokenAddress: '0x2791Bca1f2de4661ED88A30C99A719449Aa84174', - invalidTokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa8417', - tokenAddressUsdt: '0xc2132D05D31c914a87C6611C10748AEb04B58e8F', - tokenNameUsdt: 'USDT', - incorrectTokenAddressUsdt: '0xAECDBa5770353855a9F068104A40E0f32e2605C6', - invalidTokenAddressUsdt: '0xc2132D05D31c914a87C6611C10748AEb04B58e8', - providerNetwork: 'https://polygon-bundler.etherspot.io', - invalidProviderNetwork: 'http://polygon-bundler.etherspot.io', - otherProviderNetwork: 'https://arbitrum-bundler.etherspot.io', - }, + // 137: { + // name: 'polygon', + // invalidId: '110 ', + // incorrectId: '1730', + // toChainId: '100', + // toTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83', + // toTokenName: 'USDC', + // incorrectToTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc19B60fb7A83', + // invalidToTokenAddress: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A8', + // tokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174', + // tokenName: 'USDC', + // incorrectTokenAddress: '0x2791Bca1f2de4661ED88A30C99A719449Aa84174', + // invalidTokenAddress: '0x2791Bca1f2de4661ED88A30C99A7a9449Aa8417', + // tokenAddressUsdt: '0xc2132D05D31c914a87C6611C10748AEb04B58e8F', + // tokenNameUsdt: 'USDT', + // incorrectTokenAddressUsdt: '0xAECDBa5770353855a9F068104A40E0f32e2605C6', + // invalidTokenAddressUsdt: '0xc2132D05D31c914a87C6611C10748AEb04B58e8', + // providerNetwork: 'https://polygon-bundler.etherspot.io', + // invalidProviderNetwork: 'http://polygon-bundler.etherspot.io', + // otherProviderNetwork: 'https://arbitrum-bundler.etherspot.io', + // }, }; // convert configurations to arrays for backward compatibility From bc2c59e01ac05e0e82fb779b8259c14eb8174e30 Mon Sep 17 00:00:00 2001 From: Jineshdarjee Date: Tue, 21 Jan 2025 19:23:09 +0530 Subject: [PATCH 3/3] Modified arka test cases --- .env | 25 +- package.json | 4 +- test/data/constant.json | 12 +- test/data/messages.json | 251 ++- test/data/testData.json | 16 +- .../paymasterAPIs/arka/addStake.spec.js | 456 +++++ .../paymasterAPIs/arka/checkWhitelist.spec.js | 1810 ++++++++++++++--- .../paymasterAPIs/arka/deployVp.spec.js | 357 ++++ .../paymasterAPIs/arka/deposit.spec.js | 1276 +++++++++--- .../arka/getAllWhitelist.spec.js | 12 +- .../paymasterAPIs/arka/metadata.spec.js | 17 +- .../paymasterAPIs/arka/pimlicoAddress.spec.js | 20 +- .../arka/removeWhitelist.spec.js | 1396 +++++++++---- .../paymasterAPIs/arka/whitelist.spec.js | 1387 ++++++++++--- .../paymasterAPIs/arka/addStake.spec.js | 456 +++++ .../paymasterAPIs/arka/checkWhitelist.spec.js | 1810 ++++++++++++++--- .../paymasterAPIs/arka/deployVp.spec.js | 357 ++++ .../paymasterAPIs/arka/deposit.spec.js | 1276 +++++++++--- .../arka/getAllWhitelist.spec.js | 12 +- .../paymasterAPIs/arka/metadata.spec.js | 17 +- .../paymasterAPIs/arka/pimlicoAddress.spec.js | 20 +- .../arka/removeWhitelist.spec.js | 1396 +++++++++---- .../paymasterAPIs/arka/whitelist.spec.js | 1387 ++++++++++--- 23 files changed, 11419 insertions(+), 2351 deletions(-) create mode 100644 
test/specs/mainnet/paymasterAPIs/arka/addStake.spec.js create mode 100644 test/specs/mainnet/paymasterAPIs/arka/deployVp.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/addStake.spec.js create mode 100644 test/specs/testnet/paymasterAPIs/arka/deployVp.spec.js diff --git a/.env b/.env index 34ee3ea..11e1d50 100644 --- a/.env +++ b/.env @@ -1,11 +1,14 @@ -## SDK IDs -PRIVATE_KEY=private_key -PROJECT_KEY=project_key -PROJECT_KEY_TESTNET=project_key_testnet -API_KEY=api_key -API_KEY_ARKA=api_key_arka -API_KEY_SESSIONKEY=api_key_sessionkey -INCORRECT_API_KEY_ARKA=incorrect_api_key_arka -INVALID_API_KEY_ARKA=invalid_api_key_arka -BUNDLER_API_KEY=bundler_api_key -DATA_API_KEY=data_api_key \ No newline at end of file +## PRIME SDK IDs +PRIVATE_KEY=_private_key_ +PROJECT_KEY=_project_key_ +PROJECT_KEY_TESTNET=_project_key_testnet_ +API_KEY=_api_key_ +API_KEY_ARKA=_api_key_arka_ +INCORRECT_API_KEY_ARKA=_incorrect_api_key_arka_ +INVALID_API_KEY_ARKA=_invalid_api_key_arka_ +BUNDLER_API_KEY=_bundler_api_key_ +DATA_API_KEY=_data_api_key_ +ARKA_API_KEY_PROD=_arka_api_key_prod_ +INVALID_ARKA_API_KEY_PROD=_invalid_arka_api_key_prod_ +ARKA_API_KEY_QA=_arka_api_key_qa_ +INVALID_ARKA_API_KEY_QA=_invalid_arka_api_key_qa_ \ No newline at end of file diff --git a/package.json b/package.json index 3090250..b6fef62 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "main": "index.js", "type": "module", "scripts": { - "test": "mocha --timeout 600000 --spec test/specs/paymasterAPIs/skandha/skandha_feeHistory.spec.js --reporter mochawesome --require mochawesome/register --reporter-options charts=true,code=false,showHooks=always", + "test": "mocha --timeout 600000 --spec test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js --reporter mochawesome --require mochawesome/register --reporter-options charts=true,code=false,showHooks=always", "test-mainnet": "mocha --timeout 600000 --spec test/specs/mainnet/version/version.spec.js --spec test/specs/mainnet/*/oldWallet_*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-mainnet-precondition": "mocha --timeout 600000 --spec test/specs/mainnet/precondition/*_newWallet.spec.js", "test-mainnet-postcondition": "mocha --timeout 600000 --spec test/specs/mainnet/postcondition/*_newWallet.spec.js", @@ -20,7 +20,7 @@ "test-testnet-paymasterapi": "mocha --timeout 600000 --spec test/specs/testnet/paymasterAPIs/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always", "test-service-benchmark": "mocha --timeout 600000 --spec test/specs/hostedServiceBanchmark/*.spec.js --reporter mochawesome --reporter-options charts=true,code=false,showHooks=always" }, - "author": "", + "author": "Jinesh Darji", "license": "ISC", "dependencies": { "@babel/eslint-parser": "^7.22.9", diff --git a/test/data/constant.json b/test/data/constant.json index b96d0ef..cdca89a 100644 --- a/test/data/constant.json +++ b/test/data/constant.json @@ -25,15 +25,25 @@ "property_undefined": "Cannot set properties of undefined", "remove_whitelist_1": "is not whitelisted", "remove_whitelist_2": "Successfully removed whitelisted addresses with transaction Hash", + "remove_whitelist_3": "Successfully removed whitelisted addresses", + "remove_whitelist_4": "Addresses sent were not whitelisted", "add_whitelist_1": "Successfully whitelisted with transaction Hash", "add_whitelist_2": "already whitelisted", - "add_whitelist_3": "Successfully whitelisted with transaction Hash", + "add_whitelist_3": "Successfully 
whitelisted", + "add_whitelist_4": "Invalid Address passed", + "add_whitelist_5": "Addresses were already added", "check_whitelist_1": "Already added", "check_whitelist_2": "Not added yet", "getAllWhitelist_1": "No whitelist were found on the given apiKey/policyId", "deposit_1": "Successfully deposited", "deposit_2": "Successfully deposited with transaction Hash", "deposit_3": "Balance is less than the amount to be deposited", + "deployVp_1": "Verifying paymaster already deployed", + "deployVp_2": "Invalid EntryPoint version", + "deployVp_3": "Verifying paymaster not deployed", + "addStake_1": "Successfully staked with transaction Hash", + "addStake_2": "Invalid amount to stake", + "addStake_3": "Failed to add stake", "invalid_network_1": "Invalid network/token", "invalid_network_2": "could not detect network", diff --git a/test/data/messages.json b/test/data/messages.json index dab412e..eb789ee 100644 --- a/test/data/messages.json +++ b/test/data/messages.json @@ -190,15 +190,6 @@ "fail_getTokenPaymasterAddress_2": "The respective validation not displayed while getting token paymaster address with incorrect token", "fail_getTokenPaymasterAddress_3": "The respective validation not displayed while getting token paymaster address without token", "vali_removeWhitelist_1": "Removed the address from whitelisted successfully.", - "vali_removeWhitelist_2": "The address is not whitelisted.", - "vali_removeWhitelist_3": "The validation is displayed while removing address from whitelist with invalid address.", - "vali_removeWhitelist_4": "The validation is displayed while removing address from whitelist with incorrect address.", - "fail_removeWhitelist_1": "An error is displayed while removing the address from whitelisting.", - "fail_removeWhitelist_2": "An error is displayed while calling the remove whitelist address function of arka.", - "fail_removeWhitelist_3": "The validation is not displayed while remove whitelist address with invalid address.", - "fail_removeWhitelist_4": "The validation is not displayed while remove whitelist address with incorrect address.", - "fail_removeWhitelist_5": "The validation is not displayed while remove whitelist address with random address.", - "fail_removeWhitelist_6": "The remove whitelist address function performed successfully with multiple random addresses.", "vali_addWhitelist_1": "The address/es is/are whitelisted successfully.", "vali_addWhitelist_2": "The address is already whitelisted.", "vali_addWhitelist_3": "The validation is displayed while removing address from whitelist with invalid address.", @@ -211,19 +202,11 @@ "fail_addWhitelist_5": "The validation is not displayed while add whitelist address with random address.", "fail_addWhitelist_6": "The add whitelist address function performed successfully with multiple random addresses.", "fail_addWhitelist_7": "An error is displayed while whitelisting the random address.", + "vali_checkWhitelist_0": "The address is displayed as a whitelisted.", + "vali_checkWhitelist_00": "The address is not displayed as a whitelisted.", "vali_checkWhitelist_1": "The validation is displayed while checking address from whitelist with invalid address.", - "vali_checkWhitelist_2": "The validation is displayed while checking address from whitelist with incorrect address.", - "vali_checkWhitelist_3": "The validation message is displayed while checking the whitelist address without address.", - "fail_checkWhitelist_1": "An error is displayed while calling the check whitelist address function of arka.", - 
"fail_checkWhitelist_2": "The validation is not displayed while checking whitelist address with invalid address.", - "fail_checkWhitelist_3": "The validation is not displayed while checking whitelist address with incorrect address.", "fail_checkWhitelist_4": "The validation is not displayed while checking whitelist address with random address.", - "fail_checkWhitelist_5": "The check whitelist address function performed successfully without address.", "vali_deposit_1": "The amount deposited successfully.", - "vali_deposit_2": "The validation message is displayed while deposit with invalid amount.", - "fail_deposit_1": "An error is displayed while perform the deposit.", - "fail_deposit_2": "An error is displayed while calling the deposit function of arka.", - "fail_deposit_3": "The validation is not displayed while deposit with invalid amount.", "arkaFunction_insufficientBalance": "DUE TO INSUFFICIENT WALLET BALANCE, SKIPPING TEST CASE OF THE ARKA FUNCTION", "_comment9": "Swap - Exchange offers", "vali_exchangeOffers_1": "Found exchange supported assets.", @@ -900,38 +883,43 @@ "fail_latestPolicyWalletAddressEPVersionandChainid_14": "An error message is displayed when API key is not added while fetching the latest policy of a particular wallet address, entry point version and chain id", "fail_whitelistv1_1": "An error message is displayed in the whitelist v1 API.", "fail_whitelistv2_1": "An error message is displayed in the whitelist v2 API.", - "fail_whitelistv1_2": "The validation message is not displayed when whitelisting address with invalid url and v1 API.", - "fail_whitelistv2_2": "The validation message is not displayed when whitelisting address with invalid url and v2 API.", - "vali_whitelistv1_2": "The validation message is displayed when removing non whitelisted address with invalid url and v1 API.", - "vali_whitelistv2_2": "The validation message is displayed when removing non whitelisted address with invalid url and v2 API.", - "fail_whitelistv1_3": "The validation message is not displayed when whitelisting address with incorrect url and v1 API.", - "fail_whitelistv2_3": "The validation message is not displayed when whitelisting address with incorrect url and v2 API.", - "vali_whitelistv1_3": "The validation message is displayed when removing non whitelisted address with incorrect url and v1 API.", - "vali_whitelistv2_3": "The validation message is displayed when removing non whitelisted address with incorrect url and v2 API.", - "fail_whitelistv1_4": "The validation message is not displayed when whitelisting address with invalid address and v1 API.", - "fail_whitelistv2_4": "The validation message is not displayed when whitelisting address with invalid address and v2 API.", - "vali_whitelistv1_4": "The validation message is displayed when removing non whitelisted address with invalid address and v1 API.", - "vali_whitelistv2_4": "The validation message is displayed when removing non whitelisted address with invalid address and v2 API.", - "fail_whitelistv1_44": "The validation message is not displayed when whitelisting address with incorrect address and v1 API.", - "fail_whitelistv2_44": "The validation message is not displayed when whitelisting address with incorrect address and v2 API.", - "vali_whitelistv1_44": "The validation message is displayed when removing non whitelisted address with incorrect address and v1 API.", - "vali_whitelistv2_44": "The validation message is displayed when removing non whitelisted address with incorrect address and v2 API.", - 
"fail_whitelistv1_5": "The validation message is not displayed when whitelisting address without address and v1 API.", - "fail_whitelistv2_5": "The validation message is not displayed when whitelisting address without address and v2 API.", - "vali_whitelistv1_5": "The validation message is displayed when removing non whitelisted address without address and v1 API.", - "vali_whitelistv2_5": "The validation message is displayed when removing non whitelisted address without address and v2 API.", - "fail_whitelistv1_6": "The validation message is not displayed when whitelisting address with invalid apikey and v1 API.", - "fail_whitelistv2_6": "The validation message is not displayed when whitelisting address with invalid apikey and v2 API.", - "vali_whitelistv1_6": "The validation message is displayed when removing non whitelisted address with invalid apikey and v1 API.", - "vali_whitelistv2_6": "The validation message is displayed when removing non whitelisted address with invalid apikey and v2 API.", - "fail_whitelistv1_7": "The validation message is not displayed when whitelisting address without apikey and v1 API.", - "fail_whitelistv2_7": "The validation message is not displayed when whitelisting address without apikey and v2 API.", - "vali_whitelistv1_7": "The validation message is displayed when removing non whitelisted address without apikey and v1 API.", - "vali_whitelistv2_7": "The validation message is displayed when removing non whitelisted address without apikey and v2 API.", - "fail_whitelistv1_8": "The validation message is not displayed when whitelisting address without chainid and v1 API.", - "fail_whitelistv2_8": "The validation message is not displayed when whitelisting address without chainid and v2 API.", - "vali_whitelistv1_8": "The validation message is displayed when removing non whitelisted address without chainid and v1 API.", - "vali_whitelistv2_8": "The validation message is displayed when removing non whitelisted address without chainid and v2 API.", + "fail_whitelistv1_0": "The validation message is not displayed when non whitelisted address with v1 API.", + "fail_whitelistv1_00": "The validation message is not displayed when non whitelisted address with v1 API and userVp parameter.", + "fail_whitelistv2_0": "The validation message is not displayed when non whitelisted address with v2 API.", + "vali_whitelistv1_1": "The validation message is displayed when non whitelisted address with v1 API.", + "vali_whitelistv2_1": "The validation message is displayed when non whitelisted address with v2 API.", + "fail_whitelistv1_2": "The validation message is not displayed when whitelisted address with invalid url and v1 API.", + "fail_whitelistv2_2": "The validation message is not displayed when whitelisted address with invalid url and v2 API.", + "vali_whitelistv1_2": "The validation message is displayed when non whitelisted address with invalid url and v1 API.", + "vali_whitelistv2_2": "The validation message is displayed when non whitelisted address with invalid url and v2 API.", + "fail_whitelistv1_3": "The validation message is not displayed when whitelisted address with incorrect url and v1 API.", + "fail_whitelistv2_3": "The validation message is not displayed when whitelisted address with incorrect url and v2 API.", + "vali_whitelistv1_3": "The validation message is displayed when non whitelisted address with incorrect url and v1 API.", + "vali_whitelistv2_3": "The validation message is displayed when non whitelisted address with incorrect url and v2 API.", + 
"fail_whitelistv1_4": "The validation message is not displayed when whitelisted address with invalid address and v1 API.", + "fail_whitelistv2_4": "The validation message is not displayed when whitelisted address with invalid address and v2 API.", + "vali_whitelistv1_4": "The validation message is displayed when non whitelisted address with invalid address and v1 API.", + "vali_whitelistv2_4": "The validation message is displayed when non whitelisted address with invalid address and v2 API.", + "fail_whitelistv1_44": "The validation message is not displayed when whitelisted address with incorrect address and v1 API.", + "fail_whitelistv2_44": "The validation message is not displayed when whitelisted address with incorrect address and v2 API.", + "vali_whitelistv1_44": "The validation message is displayed when non whitelisted address with incorrect address and v1 API.", + "vali_whitelistv2_44": "The validation message is displayed when non whitelisted address with incorrect address and v2 API.", + "fail_whitelistv1_5": "The validation message is not displayed when whitelisted address without address and v1 API.", + "fail_whitelistv2_5": "The validation message is not displayed when whitelisted address without address and v2 API.", + "vali_whitelistv1_5": "The validation message is displayed when non whitelisted address without address and v1 API.", + "vali_whitelistv2_5": "The validation message is displayed when non whitelisted address without address and v2 API.", + "fail_whitelistv1_6": "The validation message is not displayed when whitelisted address with invalid apikey and v1 API.", + "fail_whitelistv2_6": "The validation message is not displayed when whitelisted address with invalid apikey and v2 API.", + "vali_whitelistv1_6": "The validation message is displayed when non whitelisted address with invalid apikey and v1 API.", + "vali_whitelistv2_6": "The validation message is displayed when non whitelisted address with invalid apikey and v2 API.", + "fail_whitelistv1_7": "The validation message is not displayed when whitelisted address without apikey and v1 API.", + "fail_whitelistv2_7": "The validation message is not displayed when whitelisted address without apikey and v2 API.", + "vali_whitelistv1_7": "The validation message is displayed when non whitelisted address without apikey and v1 API.", + "vali_whitelistv2_7": "The validation message is displayed when non whitelisted address without apikey and v2 API.", + "fail_whitelistv1_8": "The validation message is not displayed when whitelisted address without chainid and v1 API.", + "fail_whitelistv2_8": "The validation message is not displayed when whitelisted address without chainid and v2 API.", + "vali_whitelistv1_8": "The validation message is displayed when non whitelisted address without chainid and v1 API.", + "vali_whitelistv2_8": "The validation message is displayed when non whitelisted address without chainid and v2 API.", "fail_checkWhitelistv1_1": "An error message is displayed in the check whiteist v1 API.", "fail_checkWhitelistv2_1": "An error message is displayed in the check whiteist v2 API.", "fail_checkWhitelistv1_2": "The validation message is not displayed when checking whitelisted address with invalid url and v1 API.", @@ -1003,6 +991,7 @@ "fail_removeWhitelistv1_1": "An error message is displayed in the remove whitelist v1 API.", "fail_removeWhitelistv2_1": "An error message is displayed in the remove whitelist v2 API.", "fail_removeWhitelistv1_0": "The validation message is not displayed when removing non 
whitelisted address with v1 API.", + "fail_removeWhitelistv1_00": "The validation message is not displayed when removing non whitelisted address with v1 API and userVp parameter.", "fail_removeWhitelistv2_0": "The validation message is not displayed when removing non whitelisted address with v2 API.", "vali_removeWhitelistv1_1": "The validation message is displayed when removing non whitelisted address with v1 API.", "vali_removeWhitelistv2_1": "The validation message is displayed when removing non whitelisted address with v2 API.", @@ -1084,6 +1073,166 @@ "vali_metadata_7": "The validation message is displayed while performing metadata without apikey API.", "fail_metadata_8": "The validation message is not displayed while performing metadata without chainid API.", "vali_metadata_8": "The validation message is displayed while performing metadata without chainid API.", + "vali_deployVerifyingPaymaster_verifyingPaymaster": "The verifyingPaymaster parameter is not displayed in the deployVerifyingPaymaster response.", + "vali_deployVerifyingPaymaster_txHash": "The txHash parameter is not displayed in the deployVerifyingPaymaster response.", + "fail_deployVerifyingPaymaster_1": "The validation message is not displayed while performing deployVerifyingPaymaster.", + "vali_deployVerifyingPaymaster_1": "The validation message is displayed while performing deployVerifyingPaymaster with already deployed.", + "fail_deployVerifyingPaymaster_2": "The validation message is not displayed while performing deployVerifyingPaymaster with invalid entry point version.", + "vali_deployVerifyingPaymaster_2": "The validation message is displayed while performing deployVerifyingPaymaster with invalid entry point version.", + "fail_deployVerifyingPaymaster_3": "The validation message is not displayed while performing deployVerifyingPaymaster with incorrect entry point version.", + "vali_deployVerifyingPaymaster_3": "The validation message is displayed while performing deployVerifyingPaymaster with incorrect entry point version.", + "fail_deployVerifyingPaymaster_4": "The validation message is not displayed while performing deployVerifyingPaymaster without entry point version.", + "vali_deployVerifyingPaymaster_4": "The validation message is displayed while performing deployVerifyingPaymaster without entry point version.", + "fail_deployVerifyingPaymaster_5": "The validation message is not displayed while performing deployVerifyingPaymaster with incorrect apikey.", + "vali_deployVerifyingPaymaster_5": "The validation message is displayed while performing deployVerifyingPaymaster with incorrect apikey.", + "fail_deployVerifyingPaymaster_6": "The validation message is not displayed while performing deployVerifyingPaymaster with invalid apikey.", + "vali_deployVerifyingPaymaster_6": "The validation message is displayed while performing deployVerifyingPaymaster with invalid apikey.", + "fail_deployVerifyingPaymaster_7": "The validation message is not displayed while performing deployVerifyingPaymaster without apikey.", + "vali_deployVerifyingPaymaster_7": "The validation message is displayed while performing deployVerifyingPaymaster without apikey.", + "fail_deployVerifyingPaymaster_8": "The validation message is not displayed while performing deployVerifyingPaymaster with invalid chainId.", + "vali_deployVerifyingPaymaster_8": "The validation message is displayed while performing deployVerifyingPaymaster with invalid chainId.", + "fail_deployVerifyingPaymaster_9": "The validation message is not displayed while 
performing deployVerifyingPaymaster without chainId.", + "vali_deployVerifyingPaymaster_9": "The validation message is displayed while performing deployVerifyingPaymaster without chainId.", + + "vali_addStake_message": "The message parameter is not displayed in the addStake response.", + "vali_addStake_messageText": "The text of the message parameter is not displayed correct in the addStake response.", + "fail_addStake_1": "The validation message is not displayed while performing addStake.", + "fail_addStake_2": "The validation message is not displayed while performing addStake with invalid entry point version.", + "vali_addStake_2": "The validation message is displayed while performing addStake with invalid entry point version.", + "fail_addStake_3": "The validation message is not displayed while performing addStake with incorrect entry point version.", + "vali_addStake_3": "The validation message is displayed while performing addStake with incorrect entry point version.", + "fail_addStake_4": "The validation message is not displayed while performing addStake without entry point version.", + "vali_addStake_4": "The validation message is displayed while performing addStake without entry point version.", + "fail_addStake_5": "The validation message is not displayed while performing addStake with incorrect apikey.", + "vali_addStake_5": "The validation message is displayed while performing addStake with incorrect apikey.", + "fail_addStake_6": "The validation message is not displayed while performing addStake with invalid apikey.", + "vali_addStake_6": "The validation message is displayed while performing addStake with invalid apikey.", + "fail_addStake_7": "The validation message is not displayed while performing addStake without apikey.", + "vali_addStake_7": "The validation message is displayed while performing addStake without apikey.", + "fail_addStake_8": "The validation message is not displayed while performing addStake with invalid chainId.", + "vali_addStake_8": "The validation message is displayed while performing addStake with invalid chainId.", + "fail_addStake_9": "The validation message is not displayed while performing addStake without chainId.", + "vali_addStake_9": "The validation message is displayed while performing addStake without chainId.", + "fail_addStake_10": "The validation message is not displayed while performing addStake with invalid value.", + "vali_addStake_10": "The validation message is displayed while performing addStake with invalid value.", + "fail_addStake_11": "The validation message is not displayed while performing addStake with incorrect value.", + "vali_addStake_11": "The validation message is displayed while performing addStake with incorrect value.", + "fail_addStake_12": "The validation message is not displayed while performing addStake without value.", + "vali_addStake_12": "The validation message is displayed while performing addStake without value.", + + "vali_deposit_message": "The message parameter is not displayed in the deposit response.", + "vali_deposit_messageText": "The text of the message parameter is not displayed correct in the deposit response.", + "fail_deposit_1": "The validation message is not displayed while performing deposit.", + "fail_deposit1_1": "The validation message is not displayed while performing deposit with userVp parameter as a true.", + "fail_deposit2_1": "The validation message is not displayed while performing deposit with userVp parameter as a false.", + "fail_deposit_2": "The validation message is not 
displayed while performing deposit with invalid entry point version.", + "vali_deposit_2": "The validation message is displayed while performing deposit with invalid entry point version.", + "fail_deposit_3": "The validation message is not displayed while performing deposit with incorrect entry point version.", + "vali_deposit_3": "The validation message is displayed while performing deposit with incorrect entry point version.", + "fail_deposit_4": "The validation message is not displayed while performing deposit without entry point version.", + "vali_deposit_4": "The validation message is displayed while performing deposit without entry point version.", + "fail_deposit_5": "The validation message is not displayed while performing deposit with incorrect apikey.", + "vali_deposit_5": "The validation message is displayed while performing deposit with incorrect apikey.", + "fail_deposit_6": "The validation message is not displayed while performing deposit with invalid apikey.", + "vali_deposit_6": "The validation message is displayed while performing deposit with invalid apikey.", + "fail_deposit_7": "The validation message is not displayed while performing deposit without apikey.", + "vali_deposit_7": "The validation message is displayed while performing deposit without apikey.", + "fail_deposit_8": "The validation message is not displayed while performing deposit with invalid chainId.", + "vali_deposit_8": "The validation message is displayed while performing deposit with invalid chainId.", + "fail_deposit_9": "The validation message is not displayed while performing deposit without chainId.", + "vali_deposit_9": "The validation message is displayed while performing deposit without chainId.", + "fail_deposit_10": "The validation message is not displayed while performing deposit with invalid value.", + "vali_deposit_10": "The validation message is displayed while performing deposit with invalid value.", + "fail_deposit_11": "The validation message is not displayed while performing deposit with incorrect value.", + "vali_deposit_11": "The validation message is displayed while performing deposit with incorrect value.", + "fail_deposit_12": "The validation message is not displayed while performing deposit without value.", + "vali_deposit_12": "The validation message is displayed while performing deposit without value.", + "fail_deposit_13": "The validation message is not displayed while performing deposit with userVp parameter with invalid data.", + "vali_deposit_13": "The validation message is displayed while performing deposit with userVp parameter with invalid data.", + + "vali_whitelist_message": "The message parameter is not displayed in the whitelist response.", + "vali_whitelist_messageText": "The text of the message parameter is not displayed correct in the whitelist response.", + "fail_whitelist_1": "The validation message is not displayed while performing whitelist.", + "fail_whitelist1_1": "The validation message is not displayed while performing whitelist with userVp parameter as a true.", + "fail_whitelist2_1": "The validation message is not displayed while performing whitelist with userVp parameter as a false.", + "fail_whitelist_2": "The validation message is not displayed while performing whitelist with invalid url.", + "vali_whitelist_2": "The validation message is displayed while performing whitelist with invalid url.", + "fail_whitelist_3": "The validation message is not displayed while performing whitelist with incorrect url.", + "vali_whitelist_3": "The validation message 
is displayed while performing whitelist with incorrect url.", + "fail_whitelist_5": "The validation message is not displayed while performing whitelist with incorrect apikey.", + "vali_whitelist_5": "The validation message is displayed while performing whitelist with incorrect apikey.", + "fail_whitelist_6": "The validation message is not displayed while performing whitelist with invalid apikey.", + "vali_whitelist_6": "The validation message is displayed while performing whitelist with invalid apikey.", + "fail_whitelist_7": "The validation message is not displayed while performing whitelist without apikey.", + "vali_whitelist_7": "The validation message is displayed while performing whitelist without apikey.", + "fail_whitelist_8": "The validation message is not displayed while performing whitelist with invalid chainId.", + "vali_whitelist_8": "The validation message is displayed while performing whitelist with invalid chainId.", + "fail_whitelist_9": "The validation message is not displayed while performing whitelist without chainId.", + "vali_whitelist_9": "The validation message is displayed while performing whitelist without chainId.", + "fail_whitelist_10": "The validation message is not displayed while performing whitelist with invalid value.", + "vali_whitelist_10": "The validation message is displayed while performing whitelist with invalid value.", + "fail_whitelist_11": "The validation message is not displayed while performing whitelist with incorrect value.", + "vali_whitelist_11": "The validation message is displayed while performing whitelist with incorrect value.", + "fail_whitelist_12": "The validation message is not displayed while performing whitelist without value.", + "vali_whitelist_12": "The validation message is displayed while performing whitelist without value.", + "fail_whitelist_13": "The validation message is not displayed while performing whitelist with userVp parameter with invalid data.", + "vali_whitelist_13": "The validation message is displayed while performing whitelist with userVp parameter with invalid data.", + + "vali_removeWhitelist_message": "The message parameter is not displayed in the removeWhitelist response.", + "vali_removeWhitelist_messageText": "The text of the message parameter is not displayed correct in the removeWhitelist response.", + "fail_removeWhitelist_1": "The validation message is not displayed while performing removeWhitelist.", + "fail_removeWhitelist1_1": "The validation message is not displayed while performing removeWhitelist with userVp parameter as a true.", + "fail_removeWhitelist2_1": "The validation message is not displayed while performing removeWhitelist with userVp parameter as a false.", + "fail_removeWhitelist_2": "The validation message is not displayed while performing removeWhitelist with invalid url.", + "vali_removeWhitelist_2": "The validation message is displayed while performing removeWhitelist with invalid url.", + "fail_removeWhitelist_3": "The validation message is not displayed while performing removeWhitelist with incorrect url.", + "vali_removeWhitelist_3": "The validation message is displayed while performing removeWhitelist with incorrect url.", + "fail_removeWhitelist_5": "The validation message is not displayed while performing removeWhitelist with incorrect apikey.", + "vali_removeWhitelist_5": "The validation message is displayed while performing removeWhitelist with incorrect apikey.", + "fail_removeWhitelist_6": "The validation message is not displayed while performing removeWhitelist with 
invalid apikey.", + "vali_removeWhitelist_6": "The validation message is displayed while performing removeWhitelist with invalid apikey.", + "fail_removeWhitelist_7": "The validation message is not displayed while performing removeWhitelist without apikey.", + "vali_removeWhitelist_7": "The validation message is displayed while performing removeWhitelist without apikey.", + "fail_removeWhitelist_8": "The validation message is not displayed while performing removeWhitelist with invalid chainId.", + "vali_removeWhitelist_8": "The validation message is displayed while performing removeWhitelist with invalid chainId.", + "fail_removeWhitelist_9": "The validation message is not displayed while performing removeWhitelist without chainId.", + "vali_removeWhitelist_9": "The validation message is displayed while performing removeWhitelist without chainId.", + "fail_removeWhitelist_10": "The validation message is not displayed while performing removeWhitelist with invalid value.", + "vali_removeWhitelist_10": "The validation message is displayed while performing removeWhitelist with invalid value.", + "fail_removeWhitelist_11": "The validation message is not displayed while performing removeWhitelist with incorrect value.", + "vali_removeWhitelist_11": "The validation message is displayed while performing removeWhitelist with incorrect value.", + "fail_removeWhitelist_12": "The validation message is not displayed while performing removeWhitelist without value.", + "vali_removeWhitelist_12": "The validation message is displayed while performing removeWhitelist without value.", + "fail_removeWhitelist_13": "The validation message is not displayed while performing removeWhitelist with userVp parameter with invalid data.", + "vali_removeWhitelist_13": "The validation message is displayed while performing removeWhitelist with userVp parameter with invalid data.", + + "vali_checkWhitelist_message": "The message parameter is not displayed in the checkWhitelist response.", + "vali_checkWhitelist_messageText": "The text of the message parameter is not displayed correct in the checkWhitelist response.", + "fail_checkWhitelist_1": "The validation message is not displayed while performing checkWhitelist.", + "fail_checkWhitelist1_1": "The validation message is not displayed while performing checkWhitelist with userVp parameter as a true.", + "fail_checkWhitelist2_1": "The validation message is not displayed while performing checkWhitelist with userVp parameter as a false.", + "fail_checkWhitelist_2": "The validation message is not displayed while performing checkWhitelist with invalid url.", + "vali_checkWhitelist_2": "The validation message is displayed while performing checkWhitelist with invalid url.", + "fail_checkWhitelist_3": "The validation message is not displayed while performing checkWhitelist with incorrect url.", + "vali_checkWhitelist_3": "The validation message is displayed while performing checkWhitelist with incorrect url.", + "fail_checkWhitelist_5": "The validation message is not displayed while performing checkWhitelist with incorrect apikey.", + "vali_checkWhitelist_5": "The validation message is displayed while performing checkWhitelist with incorrect apikey.", + "fail_checkWhitelist_6": "The validation message is not displayed while performing checkWhitelist with invalid apikey.", + "vali_checkWhitelist_6": "The validation message is displayed while performing checkWhitelist with invalid apikey.", + "fail_checkWhitelist_7": "The validation message is not displayed while performing 
checkWhitelist without apikey.", + "vali_checkWhitelist_7": "The validation message is displayed while performing checkWhitelist without apikey.", + "fail_checkWhitelist_8": "The validation message is not displayed while performing checkWhitelist with invalid chainId.", + "vali_checkWhitelist_8": "The validation message is displayed while performing checkWhitelist with invalid chainId.", + "fail_checkWhitelist_9": "The validation message is not displayed while performing checkWhitelist without chainId.", + "vali_checkWhitelist_9": "The validation message is displayed while performing checkWhitelist without chainId.", + "fail_checkWhitelist_10": "The validation message is not displayed while performing checkWhitelist with invalid value.", + "vali_checkWhitelist_10": "The validation message is displayed while performing checkWhitelist with invalid value.", + "fail_checkWhitelist_11": "The validation message is not displayed while performing checkWhitelist with incorrect value.", + "vali_checkWhitelist_11": "The validation message is displayed while performing checkWhitelist with incorrect value.", + "fail_checkWhitelist_12": "The validation message is not displayed while performing checkWhitelist without value.", + "vali_checkWhitelist_12": "The validation message is displayed while performing checkWhitelist without value.", + "fail_checkWhitelist_13": "The validation message is not displayed while performing checkWhitelist with userVp parameter with invalid data.", + "vali_checkWhitelist_13": "The validation message is displayed while performing checkWhitelist with userVp parameter with invalid data.", "fail_skandha_getGasPrice_1": "An error message is displayed in the skandha_getGasPrice endpoint.", "fail_skandha_getGasPrice_2": "The validation message is not displayed in the skandha_getGasPrice endpoint when entered invalid method name.", diff --git a/test/data/testData.json b/test/data/testData.json index 4f1e713..05b9bc4 100644 --- a/test/data/testData.json +++ b/test/data/testData.json @@ -115,6 +115,7 @@ "sepolia_chainid": 11155111, "sparknet_chainid": 123, "amoy_chainid": 80002, + "invalid_chainId": 0, "invalid_arbitrum_chainid": 42656, "invalid_matic_chainid": 19, "invalid_optimism_chainid": 158, @@ -140,10 +141,14 @@ "recipient4": "0x81a13A210717b765C5733aD70D81deC1DF1834dd", "recipient5": "0xE05FB316eB8C4ba7288D43c1bd87BE8a8d16761C", "txCount": 50, + "ep06": "EPV_06", + "ep07": "EPV_07", + "invalid_epversion": "EP_07", + "incorrect_epversion": "EPV_09", "transactionHash": "0x639d74d9a681f19bc3a32ae4b5ee4b2562cec8458508babe0ac727d96b153360", "invalid_transactionHash": "0x639d74d9a681f19bc3a32ae4b5ee4b2562cec8458508babe0ac727d96b15336", "incorrect_transactionHash": "0x639d24d9a682f19bc3a32a14b5ee4b2562cec8458508ba1e0ac727d96b153160", - "arka_fqdn": "https://arka-qa.etherspot.io", + "arka_addPolicy": "add-policy", "arka_updatePolicy": "update-policy", "arka_deletePolicy": "delete-policy", @@ -197,8 +202,15 @@ "arka_metadata": "https://arka.etherspot.io/metadata", "arka_metadata_invalid": "http://arka.etherspot.io/metadata", "arka_metadata_incorrect": "https://arka.etherspot.io/mata", + "arka_deployVerifyingPaymaster": "https://arka.etherspot.io/deployVerifyingPaymaster", + "arka_deployVerifyingPaymaster_invalid": "http://arka.etherspot.io/deployVerifyingPaymaster", + "arka_deployVerifyingPaymaster_incorrect": "https://arka.etherspot.io/deployVP", + "arka_addStake": "https://arka.etherspot.io/addStake", + "arka_addStake_invalid": "http://arka.etherspot.io/addStake", + 
"arka_addStake_incorrect": "https://arka.etherspot.io/addSta", "address": "0xE4fAe3bEEeFEDAaC49548869fca6F180fd37CA40", "blockCount": 15, "invalid_hex": "1234567890", - "incorrect_hex": "0xC22cF2aA30A0181d6fE4B0B11aab238714Ba54f3" + "incorrect_hex": "0xC22cF2aA30A0181d6fE4B0B11aab238714Ba54f3", + "arka_sponsorAddress": "0xca668083399c4C927C2c9c42feB542555f5db17F" } diff --git a/test/specs/mainnet/paymasterAPIs/arka/addStake.spec.js b/test/specs/mainnet/paymasterAPIs/arka/addStake.spec.js new file mode 100644 index 0000000..9daf8e5 --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/addStake.spec.js @@ -0,0 +1,456 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the add stake endpoint of the Arka', function () { + it( + 'SMOKE: Validate the add stake endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_addStake_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.addStake_1, + message.vali_addStake_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_1); + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalid_epversion, data.value], // invalid entry point + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_2); + console.log(message.fail_addStake_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_addStake_2); + console.log(message.vali_addStake_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with incorrect entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = 
`${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.incorrect_epversion, data.value], // incorrect entry point + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_3); + console.log(message.fail_addStake_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_addStake_3); + console.log(message.vali_addStake_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka without entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.value], // without entry point + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_4); + console.log(message.fail_addStake_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_2)) { + addContext(test, message.vali_addStake_4); + console.log(message.vali_addStake_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07, data.invalidValue], // invalid value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_10); + console.log(message.fail_addStake_10); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_2)) { + addContext(test, message.vali_addStake_10); + console.log(message.vali_addStake_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with exceeded value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07, data.exceededValue], // exceeded value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_11); + console.log(message.fail_addStake_11); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_3)) { + addContext(test, message.vali_addStake_11); + console.log(message.vali_addStake_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_addStake_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka without value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07], // without value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_12); + console.log(message.fail_addStake_12); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_2)) { + addContext(test, message.vali_addStake_12); + console.log(message.vali_addStake_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with incorrect apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_5); + console.log(message.fail_addStake_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_addStake_5); + console.log(message.vali_addStake_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_6); + console.log(message.fail_addStake_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_addStake_6); + console.log(message.vali_addStake_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka without apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_7); + console.log(message.fail_addStake_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_addStake_7); + 
console.log(message.vali_addStake_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}`; // invalid chainId + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_8); + console.log(message.fail_addStake_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_addStake_8); + console.log(message.vali_addStake_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka without chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainId + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_9); + console.log(message.fail_addStake_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_addStake_9); + console.log(message.vali_addStake_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_9); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js index faa41f2..8a2f3c7 100644 --- a/test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js +++ b/test/specs/mainnet/paymasterAPIs/arka/checkWhitelist.spec.js @@ -3,7 +3,7 @@ dotenv.config(); // init dotenv import { ethers } from 'ethers'; import { assert } from 'chai'; import addContext from 'mochawesome/addContext.js'; -import Helper from '../../../../utils/helper.js'; +import helper from '../../../../utils/helper.js'; import data from '../../../../data/testData.json' assert { type: 'json' }; import { randomChainId, @@ -21,54 +21,131 @@ const header = { describe('Validate the check whitelist endpoint of the Arka', function () { it( - 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - 
const addresses = [randomAddress.address]; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } - // make the random address whitelisted + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_1); + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint with v1 and userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; - // wait for the few seconds - Helper.wait(15000); + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; - // check the whitelist status + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); // perform 
assertions assert.include(response.data.message, constant.check_whitelist_1); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_checkWhitelist_0); + console.log(message.vali_checkWhitelist_0); } catch (e) { console.error(e); const eString = e.toString(); @@ -79,29 +156,137 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; - // check the whitelist status + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) 
|| + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 and userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); // perform assertions assert.include(response.data.message, constant.check_whitelist_2); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_checkWhitelist_00); + console.log(message.vali_checkWhitelist_00); } catch (e) { console.error(e); const eString = e.toString(); @@ -112,22 +297,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid url of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid url + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_2); console.log(message.fail_checkWhitelistv1_2); @@ -148,22 +333,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect url of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and incorrect url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // incorrect url + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_3); console.log(message.fail_checkWhitelistv1_3); @@ -184,24 +369,22 
@@ describe('Validate the check whitelist endpoint of the Arka', function () {
   );

   it(
-    'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid address of Arka on ' +
+    'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and invalid address of Arka on ' +
       randomChainName +
       ' Network',
     async function () {
-      var test = this;
+      // define the url
+      const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`;
+      // define the payload
+      const requestData = {
+        params: [data.invalid_sponsorAddress], // invalid address
+      };
+
+      var test = this;
+      // validate the whitelist endpoint
       try {
-        const response = await axios.post(
-          data.arka_checkwhitelist,
-          {
-            params: [
-              data.invalid_sponsorAddress, // invalid address
-              randomChainId,
-              process.env.API_KEY,
-            ],
-          },
-          header
-        );
+        const response = await axios.post(url, requestData, header);

         addContext(test, message.fail_checkWhitelistv1_4);
         console.log(message.fail_checkWhitelistv1_4);
@@ -222,24 +405,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () {
   );

   it(
-    'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect address of Arka on ' +
+    'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and incorrect address of Arka on ' +
       randomChainName +
       ' Network',
     async function () {
-      var test = this;
+      // define the url
+      const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`;
+
+      // define the payload
+      const requestData = {
+        params: [data.incorrect_sponsorAddress], // incorrect address
+      };
+      var test = this;
+      // validate the whitelist endpoint
       try {
-        const response = await axios.post(
-          data.arka_checkwhitelist,
-          {
-            params: [
-              data.incorrect_sponsorAddress, // incorrect address
-              randomChainId,
-              process.env.API_KEY,
-            ],
-          },
-          header
-        );
+        const response = await axios.post(url, requestData, header);

         addContext(test, message.fail_checkWhitelistv1_44);
         console.log(message.fail_checkWhitelistv1_44);
@@ -260,23 +441,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () {
   );

   it(
-    'REGRESSION: Validate the Check Whitelist endpoint with v1 and without address of Arka on ' +
+    'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and without address of Arka on ' +
       randomChainName +
       ' Network',
     async function () {
-      var test = this;
+      // define the url
+      const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`;
+      // define the payload
+      const requestData = {
+        params: [], // without address
+      };
+
+      var test = this;
+      // validate the whitelist endpoint
       try {
-        const response = await axios.post(
-          data.arka_checkwhitelist,
-          {
-            params: [
-              randomChainId, // without address
-              process.env.API_KEY,
-            ],
-          },
-          header
-        );
+        const response = await axios.post(url, requestData, header);

         addContext(test, message.fail_checkWhitelistv1_5);
         console.log(message.fail_checkWhitelistv1_5);
@@ -297,22 +477,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () {
   );

   it(
-    'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid apikey of Arka on ' +
+    'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and invalid apikey of Arka on ' +
       randomChainName +
       ' Network',
     async function () {
-      var test = this;
-      const randomAddress = ethers.Wallet.createRandom();
-      const
addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_6); console.log(message.fail_checkWhitelistv1_6); @@ -333,22 +513,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without apikey of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist}?&chainId=${randomChainId}&useVp=true`; // without apikey + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_7); console.log(message.fail_checkWhitelistv1_7); @@ -369,22 +549,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without chainid of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainid + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_8); console.log(message.fail_checkWhitelistv1_8); @@ -405,84 +585,130 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = 
`${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; - // make the random address whitelisted + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url1, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); - // perform assertions - assert.equal(response.status, constant.successCode_1); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_1); - } + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } - // wait for the few seconds - Helper.wait(15000); + // wait for the response + helper.wait(data.mediumTimeout); + } - // check the whitelist status + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.check_whitelist_1); + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_1); + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } } ); it( - 'SMOKE: Validate the Check Whitelist endpoint which was not 
whitelisted with v2 of Arka on ' + + 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v1 of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; - // check the whitelist status + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url, requestData, header); // perform assertions - assert.include(response.data.message, constant.check_whitelist_2); + assert.include(response.data.message, constant.check_whitelist_1); // perform assertions assert.equal(response.status, constant.successCode_1); @@ -490,75 +716,810 @@ describe('Validate the check whitelist endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_1); + assert.fail(message.fail_checkWhitelistv1_1); } } ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid url of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url1, requestData, header); - addContext(test, message.fail_checkWhitelistv2_2); - console.log(message.fail_checkWhitelistv2_2); + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_checkWhitelistv2_2); - console.log(message.vali_checkWhitelistv2_2); + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_2); + assert.fail(message.fail_removeWhitelistv1_00); } - } - } - ); - it( - 'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect url of Arka on ' + - randomChainName + - ' Network', - async 
function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // wait for the response + helper.wait(data.mediumTimeout); + } + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); - addContext(test, message.fail_checkWhitelistv2_3); - console.log(message.fail_checkWhitelistv2_3); + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_checkWhitelistv2_3); - console.log(message.vali_checkWhitelistv2_3); + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); } else { - console.error(e); + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + 
// define the url + const url = `${data.arka_checkwhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_2); + console.log(message.fail_checkWhitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_2); + console.log(message.vali_checkWhitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_3); + console.log(message.fail_checkWhitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_3); + console.log(message.vali_checkWhitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalid_sponsorAddress], // invalid address + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_4); + console.log(message.fail_checkWhitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_4); + console.log(message.vali_checkWhitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.incorrect_sponsorAddress], // incorrect address + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_44); + console.log(message.fail_checkWhitelistv1_44); + } catch (e) { + 
const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_44); + console.log(message.vali_checkWhitelistv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without address + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_5); + console.log(message.fail_checkWhitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_5); + console.log(message.vali_checkWhitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_6); + console.log(message.fail_checkWhitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_6); + console.log(message.vali_checkWhitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_7); + console.log(message.fail_checkWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_7); + console.log(message.vali_checkWhitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without chainid of Arka on 
' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_8); + console.log(message.fail_checkWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_8); + console.log(message.vali_checkWhitelistv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_8); + } + } + } + ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + 
error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_1); + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv2_2); + console.log(message.fail_checkWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv2_2); + console.log(message.vali_checkWhitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_v2_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist 
endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv2_3); + console.log(message.fail_checkWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv2_3); + console.log(message.vali_checkWhitelistv2_3); + } else { + console.error(e); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_checkWhitelistv2_3); @@ -574,18 +1535,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { async function () { var test = this; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalid_sponsorAddress], // invalid address + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_4); console.log(message.fail_checkWhitelistv2_4); @@ -612,18 +1572,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { async function () { var test = this; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.incorrect_sponsorAddress], // incorrect address + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_4); console.log(message.fail_checkWhitelistv2_4); @@ -650,17 +1609,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { async function () { var test = this; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without address + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_5); console.log(message.fail_checkWhitelistv2_5); @@ -689,14 +1648,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, 
message.fail_checkWhitelistv2_6); console.log(message.fail_checkWhitelistv2_6); @@ -725,14 +1687,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_7); console.log(message.fail_checkWhitelistv2_7); @@ -761,14 +1726,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_8); console.log(message.fail_checkWhitelistv2_8); @@ -787,4 +1755,292 @@ describe('Validate the check whitelist endpoint of the Arka', function () { } } ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + 
console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the check whitelist endpoint with v1 and userVp parameter as a false of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_checkWhitelist_message + ); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.add_whitelist_3, + message.vali_checkWhitelist_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_1); + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); 
+ assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the check whitelist endpoint with v1 and userVp parameter with invalid data of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelist_13); + console.log(message.fail_checkWhitelist_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_checkWhitelist_13); + console.log(message.vali_checkWhitelist_13); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_13); + } + } + } + ); }); diff --git a/test/specs/mainnet/paymasterAPIs/arka/deployVp.spec.js b/test/specs/mainnet/paymasterAPIs/arka/deployVp.spec.js new file mode 100644 index 0000000..67b5b7c --- /dev/null +++ b/test/specs/mainnet/paymasterAPIs/arka/deployVp.spec.js @@ -0,0 +1,357 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_mainnet.js'; +import axios from 'axios'; +import message from 
'../../../../data/messages.json' assert { type: 'json' };
+import constant from '../../../../data/constant.json' assert { type: 'json' };
+
+// define header with valid details
+const header = {
+  Accept: 'application/json',
+  'Content-Type': 'application/json',
+};
+
+describe('Validate the deploy-vp endpoint of the Arka', function () {
+  it(
+    'SMOKE: Validate the deploy-vp endpoint of Arka on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      // define the url
+      const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`;
+
+      // define the payload
+      const requestData = {
+        params: [data.ep06, data.ep07],
+      };
+
+      var test = this;
+      // validate the deployVerifyingPaymaster endpoint
+      try {
+        const response = await axios.post(url, requestData, header);
+
+        // validate the verifyingPaymaster parameter in the response
+        assert.isNotEmpty(
+          response.data.verifyingPaymaster,
+          message.vali_deployVerifyingPaymaster_verifyingPaymaster
+        );
+
+        // validate the txHash parameter in the response
+        assert.isNotEmpty(
+          response.data.txHash,
+          message.vali_deployVerifyingPaymaster_txHash
+        );
+
+        // perform assertions
+        assert.equal(response.status, constant.successCode_1);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.deployVp_1)) {
+          addContext(test, message.vali_deployVerifyingPaymaster_1);
+          console.log(message.vali_deployVerifyingPaymaster_1);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_deployVerifyingPaymaster_1);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the deploy-vp endpoint of Arka with invalid entry points on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      // define the url
+      const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`;
+
+      // define the payload
+      const requestData = {
+        params: [data.invalid_epversion], // invalid entry point
+      };
+
+      var test = this;
+      // validate the deployVerifyingPaymaster endpoint
+      try {
+        const response = await axios.post(url, requestData, header);
+
+        addContext(test, message.fail_deployVerifyingPaymaster_2);
+        console.log(message.fail_deployVerifyingPaymaster_2);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.deployVp_2)) {
+          addContext(test, message.vali_deployVerifyingPaymaster_2);
+          console.log(message.vali_deployVerifyingPaymaster_2);
+        } else {
+          console.error(e);
+          const eString = e.toString();
+          addContext(test, eString);
+          assert.fail(message.fail_deployVerifyingPaymaster_2);
+        }
+      }
+    }
+  );
+
+  it(
+    'REGRESSION: Validate the deploy-vp endpoint of Arka with incorrect entry points on ' +
+      randomChainName +
+      ' Network',
+    async function () {
+      // define the url
+      const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`;
+
+      // define the payload
+      const requestData = {
+        params: [data.incorrect_epversion], // incorrect entry point
+      };
+
+      var test = this;
+      // validate the deployVerifyingPaymaster endpoint
+      try {
+        const response = await axios.post(url, requestData, header);
+
+        addContext(test, message.fail_deployVerifyingPaymaster_3);
+        console.log(message.fail_deployVerifyingPaymaster_3);
+      } catch (e) {
+        const error = e.response.data.error;
+
+        if (error.includes(constant.deployVp_2)) {
+          addContext(test, message.vali_deployVerifyingPaymaster_3);
+
console.log(message.vali_deployVerifyingPaymaster_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka without entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without entry point + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_4); + console.log(message.fail_deployVerifyingPaymaster_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_deployVerifyingPaymaster_4); + console.log(message.vali_deployVerifyingPaymaster_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with incorrect apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_5); + console.log(message.fail_deployVerifyingPaymaster_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deployVerifyingPaymaster_5); + console.log(message.vali_deployVerifyingPaymaster_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with invalid apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_6); + console.log(message.fail_deployVerifyingPaymaster_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deployVerifyingPaymaster_6); + console.log(message.vali_deployVerifyingPaymaster_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka without apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = 
`${data.arka_deployVerifyingPaymaster}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_7); + console.log(message.fail_deployVerifyingPaymaster_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deployVerifyingPaymaster_7); + console.log(message.vali_deployVerifyingPaymaster_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with invalid chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}`; // invalid chainId + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_8); + console.log(message.fail_deployVerifyingPaymaster_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_network_3)) { + addContext(test, message.vali_deployVerifyingPaymaster_8); + console.log(message.vali_deployVerifyingPaymaster_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka without chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainId + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_9); + console.log(message.fail_deployVerifyingPaymaster_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deployVerifyingPaymaster_9); + console.log(message.vali_deployVerifyingPaymaster_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_9); + } + } + } + ); +}); diff --git a/test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js b/test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js index 9009b2c..903eee2 100644 --- a/test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js +++ b/test/specs/mainnet/paymasterAPIs/arka/deposit.spec.js @@ -23,17 +23,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.value], + }; - // make the random address whitelisted + var test = 
this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); // perform assertions assert.include(response.data.message, constant.deposit_2); @@ -54,16 +55,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit_invalid, // invalid url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_2); console.log(message.fail_depositv1_2); @@ -88,16 +91,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit_incorrect, // incorrect url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_3); console.log(message.fail_depositv1_3); @@ -117,25 +122,23 @@ describe('Validate the deposit endpoint of the Arka', function () { } ); - it.only( + it( 'REGRESSION: Validate the deposit endpoint with v1 and invalid value of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [ - data.invalidValue, // invalid value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_4); console.log(message.fail_depositv1_4); @@ -155,25 +158,23 @@ describe('Validate the deposit endpoint of the Arka', function () { } ); - it.only( + it( 'REGRESSION: Validate the deposit endpoint with v1 and exceeded value of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [ - data.exceededValue, // exceeded value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, 
message.fail_depositv1_44); console.log(message.fail_depositv1_44); @@ -193,24 +194,23 @@ describe('Validate the deposit endpoint of the Arka', function () { } ); - it.only( + it( 'REGRESSION: Validate the deposit endpoint with v1 and without value of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [ - randomChainId, // without value - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_5); console.log(message.fail_depositv1_5); @@ -235,16 +235,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_6); console.log(message.fail_depositv1_6); @@ -269,16 +271,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?chainId=${randomChainId}`; // without apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_7); console.log(message.fail_depositv1_7); @@ -303,16 +307,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_8); console.log(message.fail_depositv1_8); @@ -333,24 +339,32 @@ describe('Validate the deposit endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the deposit endpoint with v2 of Arka on the ' + + 'SMOKE: Validate the deposit endpoint with v1 and userVp parameter of Arka on ' + randomChainName + - ' network', + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData 
= { + params: [data.value], + }; - // make the random address whitelisted + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url, requestData, header); - // perform assertions - assert.include(response.data.message, constant.deposit_2); + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText + ); // perform assertions assert.equal(response.status, constant.successCode_1); @@ -358,289 +372,1091 @@ describe('Validate the deposit endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_1); + assert.fail(message.fail_deposit1_1); } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and invalid url of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and invalid value on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2_invalid, // invalid url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_2); - console.log(message.fail_depositv2_2); + addContext(test, message.fail_deposit_10); + console.log(message.fail_deposit_10); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_depositv2_2); - console.log(message.vali_depositv2_2); + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_10); + console.log(message.vali_deposit_10); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_2); + assert.fail(message.fail_deposit_10); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and incorrect url of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and exceeded value on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2_incorrect, // incorrect url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_3); - console.log(message.fail_depositv2_3); + addContext(test, message.fail_deposit_11); + console.log(message.fail_deposit_11); } catch (e) { const error = e.response.data.error; - if 
(error.includes(constant.not_found)) { - addContext(test, message.vali_depositv2_3); - console.log(message.vali_depositv2_3); + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_deposit_11); + console.log(message.vali_deposit_11); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_3); + assert.fail(message.fail_deposit_11); } } } ); - it.only( - 'REGRESSION: Validate the deposit endpoint with v2 and invalid value of Arka on ' + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and without value on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [ - data.invalidValue, // invalid value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_4); - console.log(message.fail_depositv2_4); + addContext(test, message.fail_deposit_12); + console.log(message.fail_deposit_12); } catch (e) { const error = e.response.data.error; if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_depositv2_4); - console.log(message.vali_depositv2_4); + addContext(test, message.vali_deposit_12); + console.log(message.vali_deposit_12); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_4); + assert.fail(message.fail_deposit_12); } } } ); - it.only( - 'REGRESSION: Validate the deposit endpoint with v2 and exceeded value of Arka on ' + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and incorrect apikey on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}&useVp=true`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [ - data.exceededValue, // exceeded value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_4); - console.log(message.fail_depositv2_4); + addContext(test, message.fail_deposit_5); + console.log(message.fail_deposit_5); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.deposit_3)) { - addContext(test, message.vali_depositv2_4); - console.log(message.vali_depositv2_4); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_5); + console.log(message.vali_deposit_5); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_4); + assert.fail(message.fail_deposit_5); } } } ); - it.only( - 'REGRESSION: Validate the deposit endpoint with v2 and without value of Arka on ' + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and invalid apikey on ' + randomChainName + ' Network', async 
function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [ - randomChainId, // without value - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_5); - console.log(message.fail_depositv2_5); + addContext(test, message.fail_deposit_6); + console.log(message.fail_deposit_6); } catch (e) { const error = e.response.data.error; if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_depositv2_5); - console.log(message.vali_depositv2_5); + addContext(test, message.vali_deposit_6); + console.log(message.vali_deposit_6); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_5); + assert.fail(message.fail_deposit_6); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and without apikey on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?chainId=${randomChainId}&useVp=true`; // without apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_6); - console.log(message.fail_depositv2_6); + addContext(test, message.fail_deposit_7); + console.log(message.fail_deposit_7); } catch (e) { const error = e.response.data.error; if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_depositv2_6); - console.log(message.vali_depositv2_6); + addContext(test, message.vali_deposit_7); + console.log(message.vali_deposit_7); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_6); + assert.fail(message.fail_deposit_7); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and without apikey of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and invalid chainId on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}&useVp=true`; // invalid chainId + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_7); - console.log(message.fail_depositv2_7); + addContext(test, message.fail_deposit_8); + console.log(message.fail_deposit_8); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_apiKey)) { - 
addContext(test, message.vali_depositv2_7); - console.log(message.vali_depositv2_7); + if (error.includes(constant.invalid_network_3)) { + addContext(test, message.vali_deposit_8); + console.log(message.vali_deposit_8); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_7); + assert.fail(message.fail_deposit_8); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and without chainid of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and without chainId on ' + randomChainName + ' Network', async function () { + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainId + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_9); + console.log(message.fail_deposit_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_9); + console.log(message.vali_deposit_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter with false value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; // false value + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, process.env.API_KEY], // without chainid - }, - header + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText ); - addContext(test, message.fail_depositv2_8); - console.log(message.fail_depositv2_8); + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit2_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter with invalid data on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; // invalid data in userVp parameter + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_13); + console.log(message.fail_deposit_13); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_depositv2_8); - console.log(message.vali_depositv2_8); + if (error.includes(constant.deployVp_3)) { + addContext(test, 
message.vali_deposit_13); + console.log(message.vali_deposit_13); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_8); + assert.fail(message.fail_deposit_13); + } + } + } + ); + + it( + 'SMOKE: Validate the deposit endpoint with v2 of Arka on the ' + + randomChainName + + ' network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.deposit_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_2); + console.log(message.fail_depositv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv2_2); + console.log(message.vali_depositv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_3); + console.log(message.fail_depositv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv2_3); + console.log(message.vali_depositv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid value of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_4); + console.log(message.fail_depositv2_4); + } catch (e) { + const error = e.response.data.error; + 
+ if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_depositv2_4); + console.log(message.vali_depositv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and exceeded value of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_4); + console.log(message.fail_depositv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_depositv2_4); + console.log(message.vali_depositv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without value of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_5); + console.log(message.fail_depositv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_5); + console.log(message.vali_depositv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_6); + console.log(message.fail_depositv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_6); + console.log(message.vali_depositv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + 
addContext(test, message.fail_depositv2_7); + console.log(message.fail_depositv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_7); + console.log(message.vali_depositv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_8); + console.log(message.fail_depositv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_8); + console.log(message.vali_depositv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_8); + } + } + } + ); + + it( + 'SMOKE: Validate the deposit endpoint with v2 and userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit1_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and invalid value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_10); + console.log(message.fail_deposit_10); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_10); + console.log(message.vali_deposit_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and exceeded value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = 
`${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_11); + console.log(message.fail_deposit_11); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_deposit_11); + console.log(message.vali_deposit_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and without value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_12); + console.log(message.fail_deposit_12); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_12); + console.log(message.vali_deposit_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and incorrect apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}&useVp=true`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_5); + console.log(message.fail_deposit_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_5); + console.log(message.vali_deposit_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and invalid apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_6); + console.log(message.fail_deposit_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_6); + console.log(message.vali_deposit_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); 
+ assert.fail(message.fail_deposit_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and without apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?chainId=${randomChainId}&useVp=true`; // without apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_7); + console.log(message.fail_deposit_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_7); + console.log(message.vali_deposit_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and invalid chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}&useVp=true`; // invalid chainId + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_8); + console.log(message.fail_deposit_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_network_3)) { + addContext(test, message.vali_deposit_8); + console.log(message.vali_deposit_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and without chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainId + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_9); + console.log(message.fail_deposit_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_9); + console.log(message.vali_deposit_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter with false value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; // false value + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message 
parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit2_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter with invalid data on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; // invalid data in userVp parameter + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_13); + console.log(message.fail_deposit_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_deposit_13); + console.log(message.vali_deposit_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_13); } } } diff --git a/test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js index 275b4a2..2414a79 100644 --- a/test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js +++ b/test/specs/mainnet/paymasterAPIs/arka/getAllWhitelist.spec.js @@ -34,7 +34,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_whitelist_v2, { - params: [addresses, randomChainId, process.env.API_KEY], + params: [addresses, randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -59,7 +59,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_v2, { - params: ['1', randomChainId, process.env.API_KEY], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -91,7 +91,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_v2, { - params: ['1', randomChainId, process.env.API_KEY_ARKA], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -127,7 +127,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_invalid, // invalid url { - params: ['1', randomChainId, process.env.API_KEY], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -163,7 +163,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_incorrect, // incorrect url { - params: ['1', randomChainId, process.env.API_KEY], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -271,7 +271,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist, { - params: ['1', process.env.API_KEY], // without chainid + params: ['1', process.env.ARKA_API_KEY_PROD], // without chainid }, header ); diff --git a/test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js 
b/test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js index 0c1c94f..d6ab1ed 100644 --- a/test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js +++ b/test/specs/mainnet/paymasterAPIs/arka/metadata.spec.js @@ -30,7 +30,10 @@ describe('Validate the metadata endpoint of the Arka', function () { const response = await axios.get( data.arka_metadata, { - params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + params: { + chainId: randomChainId, + apiKey: process.env.ARKA_API_KEY_PROD, + }, }, header ); @@ -95,7 +98,10 @@ describe('Validate the metadata endpoint of the Arka', function () { const response = await axios.post( data.arka_metadata_invalid, // invalid url { - params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + params: { + chainId: randomChainId, + apiKey: process.env.ARKA_API_KEY_PROD, + }, }, header ); @@ -131,7 +137,10 @@ describe('Validate the metadata endpoint of the Arka', function () { const response = await axios.post( data.arka_metadata_incorrect, // incorrect url { - params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + params: { + chainId: randomChainId, + apiKey: process.env.ARKA_API_KEY_PROD, + }, }, header ); @@ -239,7 +248,7 @@ describe('Validate the metadata endpoint of the Arka', function () { const response = await axios.post( data.arka_metadata, { - params: { apiKey: process.env.API_KEY }, // without chainid + params: { apiKey: process.env.ARKA_API_KEY_PROD }, // without chainid }, header ); diff --git a/test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js b/test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js index 62b0695..de0157f 100644 --- a/test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js +++ b/test/specs/mainnet/paymasterAPIs/arka/pimlicoAddress.spec.js @@ -34,7 +34,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { const response = await axios.post( data.arka_whitelist, { - params: [addresses, randomChainId, process.env.API_KEY], + params: [addresses, randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -63,7 +63,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -97,7 +97,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -138,7 +138,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -177,7 +177,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { data.invalid_sponsorAddress, // invalid address { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -216,7 +216,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { data.incorrect_sponsorAddress, // incorrect address { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -254,7 +254,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { params: [ { token: data.usdc_token }, randomChainId, // without address - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -371,7 +371,7 @@ describe('Validate the pimlico address endpoint of the 
Arka', function () { params: [ addresses, { token: data.usdc_token }, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], // without chainid }, header @@ -412,7 +412,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.invalid_usdc_token }, // invalid token randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -453,7 +453,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, // without token randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header diff --git a/test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js index 2803623..48d16b4 100644 --- a/test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js +++ b/test/specs/mainnet/paymasterAPIs/arka/removeWhitelist.spec.js @@ -3,13 +3,13 @@ dotenv.config(); // init dotenv import { ethers } from 'ethers'; import { assert } from 'chai'; import addContext from 'mochawesome/addContext.js'; -import Helper from '../../../../utils/helper.js'; import data from '../../../../data/testData.json' assert { type: 'json' }; import { randomChainId, randomChainName, } from '../../../../utils/sharedData_mainnet.js'; import axios from 'axios'; +import helper from '../../../../utils/helper.js'; import message from '../../../../data/messages.json' assert { type: 'json' }; import constant from '../../../../data/constant.json' assert { type: 'json' }; @@ -21,202 +21,213 @@ const header = { describe('Validate the remove whitelist endpoint of the Arka', function () { it( - 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; - // make the random address whitelisted + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url1, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); - // perform assertions - assert.equal(response.status, constant.successCode_1); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_1); - } + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_whitelistv1_00); + } - // wait for the few seconds - Helper.wait(15000); + // wait for the response + helper.wait(data.mediumTimeout); + } - // validate the remove whitelist endpoint + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.remove_whitelist_2); + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); - // perform assertions - assert.equal(response.status, constant.successCode_1); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_1); - } - } - ); + const error = e.response.data.error; - it( - 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v1 of Arka on ' + - randomChainName + - ' Network', - async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } - // validate remove whitelist endpoint + // validate the whitelist address endpoint with v2 and without userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); - addContext(test, message.fail_removeWhitelistv1_0); - console.log(message.fail_removeWhitelistv1_0); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelistv1_1); - console.log(message.vali_removeWhitelistv1_1); + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_0); + assert.fail(message.fail_whitelistv2_0); } + + // wait for the response + helper.wait(data.mediumTimeout); } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid url of Arka on ' + + 'SMOKE: Validate the remove whitelist endpoint with v1 and userVp parameter of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: 
[[data.arka_sponsorAddress]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_removeWhitelist_message ); - addContext(test, message.fail_removeWhitelistv1_2); - console.log(message.fail_removeWhitelistv1_2); - } catch (e) { - const error = e.response.data.error; + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.remove_whitelist_3, + message.vali_removeWhitelist_messageText + ); - if (error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv1_2); - console.log(message.vali_removeWhitelistv1_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_2); - } + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_1); } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect url of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid url + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv1_3); - console.log(message.fail_removeWhitelistv1_3); + addContext(test, message.fail_removeWhitelistv1_2); + console.log(message.fail_removeWhitelistv1_2); } catch (e) { const error = e.response.data.error; if (error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv1_3); - console.log(message.vali_removeWhitelistv1_3); + addContext(test, message.vali_removeWhitelistv1_2); + console.log(message.vali_removeWhitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_3); + assert.fail(message.fail_removeWhitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // 
invalid address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_4); console.log(message.fail_removeWhitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_removeWhitelistv1_4); console.log(message.vali_removeWhitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_4); @@ -226,35 +237,33 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_44); console.log(message.fail_removeWhitelistv1_44); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_removeWhitelistv1_44); console.log(message.vali_removeWhitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_44); @@ -264,23 +273,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_5); console.log(message.fail_removeWhitelistv1_5); @@ -291,7 +299,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_5); 
console.log(message.vali_removeWhitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_5); @@ -301,22 +309,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_6); console.log(message.fail_removeWhitelistv1_6); @@ -327,7 +335,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_6); console.log(message.vali_removeWhitelistv1_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_6); @@ -337,22 +345,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?chainId=${randomChainId}&useVp=true`; // without apikey + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_7); console.log(message.fail_removeWhitelistv1_7); @@ -363,7 +371,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_7); console.log(message.vali_removeWhitelistv1_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_7); @@ -373,22 +381,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without chainid of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = 
this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainid + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_8); console.log(message.fail_removeWhitelistv1_8); @@ -399,7 +407,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_8); console.log(message.vali_removeWhitelistv1_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_8); @@ -409,51 +417,127 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } - // make the random address whitelisted + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + 
console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv2_1); + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } + } + ); - // wait for the few seconds - Helper.wait(15000); + it( + 'SMOKE: Validate the remove Whitelist endpoint with v1 and without userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url, requestData, header); // perform assertions - assert.include(response.data.message, constant.remove_whitelist_2); + assert.include(response.data.message, constant.remove_whitelist_3); // perform assertions assert.equal(response.status, constant.successCode_1); @@ -461,214 +545,537 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_1); + assert.fail(message.fail_removeWhitelistv1_1); } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; - // validate remove whitelist endpoint + // define the url + const url = `${data.arka_removeWhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_2); - console.log(message.fail_removeWhitelistv2_2); + addContext(test, message.fail_removeWhitelistv1_2); + 
console.log(message.fail_removeWhitelistv1_2); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelistv2_1); - console.log(message.vali_removeWhitelistv2_1); + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv1_2); + console.log(message.vali_removeWhitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_1); + assert.fail(message.fail_removeWhitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid url of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_2); - console.log(message.fail_removeWhitelistv2_2); + addContext(test, message.fail_removeWhitelistv1_4); + console.log(message.fail_removeWhitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv2_2); - console.log(message.vali_removeWhitelistv2_2); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv1_4); + console.log(message.vali_removeWhitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_2); + assert.fail(message.fail_removeWhitelistv1_4); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect url of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_3); - console.log(message.fail_removeWhitelistv2_3); + addContext(test, message.fail_removeWhitelistv1_44); + console.log(message.fail_removeWhitelistv1_44); } catch (e) { const error = e.response.data.error; - if 
(error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv2_3); - console.log(message.vali_removeWhitelistv2_3); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv1_44); + console.log(message.vali_removeWhitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_3); + assert.fail(message.fail_removeWhitelistv1_44); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_4); - console.log(message.fail_removeWhitelistv2_4); + addContext(test, message.fail_removeWhitelistv1_5); + console.log(message.fail_removeWhitelistv1_5); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_removeWhitelistv2_4); - console.log(message.vali_removeWhitelistv2_4); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_5); + console.log(message.vali_removeWhitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_4); + assert.fail(message.fail_removeWhitelistv1_5); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_44); - console.log(message.fail_removeWhitelistv2_44); + addContext(test, message.fail_removeWhitelistv1_6); + console.log(message.fail_removeWhitelistv1_6); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_removeWhitelistv2_44); - console.log(message.vali_removeWhitelistv2_44); - } else { - console.error(e); - const eString = 
e.toString(); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_6); + console.log(message.vali_removeWhitelistv1_6); + } else { + console.error(e.response.data.error); + const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_44); + assert.fail(message.fail_removeWhitelistv1_6); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and without apikey of Arka on ' + randomChainName + ' Network', async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv1_7); + console.log(message.fail_removeWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_7); + console.log(message.vali_removeWhitelistv1_7); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv1_8); + console.log(message.fail_removeWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv1_8); + console.log(message.vali_removeWhitelistv1_8); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_8); + } + } + } + ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + 
// perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the remove Whitelist endpoint with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.remove_whitelist_3); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; 
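+        // note (editorial, hypothetical): `addresses` is itself an array, so `params: [[addresses]]` nests one level deeper than the single-address payloads (e.g. `[[data.arka_sponsorAddress]]`) used elsewhere in this spec; this negative test only asserts that the invalid url is rejected, so the payload shape is not exercised here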
+ + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv2_2); + console.log(message.fail_removeWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv2_2); + console.log(message.vali_removeWhitelistv2_2); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv2_4); + console.log(message.fail_removeWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv2_4); + console.log(message.vali_removeWhitelistv2_4); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv2_44); + console.log(message.fail_removeWhitelistv2_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv2_44); + console.log(message.vali_removeWhitelistv2_44); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_5); console.log(message.fail_removeWhitelistv2_5); @@ -679,7 +1086,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv2_5); console.log(message.vali_removeWhitelistv2_5); } else { - 
console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_5); @@ -689,22 +1096,25 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_6); console.log(message.fail_removeWhitelistv2_6); @@ -715,7 +1125,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv2_6); console.log(message.vali_removeWhitelistv2_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_6); @@ -725,22 +1135,25 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_7); console.log(message.fail_removeWhitelistv2_7); @@ -751,7 +1164,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv2_7); console.log(message.vali_removeWhitelistv2_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_7); @@ -761,33 +1174,36 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without chainid of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = 
`${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_8); console.log(message.fail_removeWhitelistv2_8); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_apiKey)) { + if (error.includes(constant.invalid_data)) { addContext(test, message.vali_removeWhitelistv2_8); console.log(message.vali_removeWhitelistv2_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_8); @@ -795,4 +1211,286 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { } } ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { 
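+        // note: the precondition treats an "already whitelisted" error (add_whitelist_2 / add_whitelist_5) as success, so the remove-whitelist tests below can run whether or not the sponsor address was whitelisted beforehand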
+ const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the remove whitelist endpoint with v1 and userVp parameter as a false of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_removeWhitelist_message + ); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.remove_whitelist_3, + message.vali_removeWhitelist_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_1); + } + } + ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const 
eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the remove whitelist endpoint with v1 and userVp parameter with invalid data of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelist_13); + console.log(message.fail_removeWhitelist_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_removeWhitelist_13); + console.log(message.vali_removeWhitelist_13); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_13); + } + } + } + ); }); diff --git a/test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js b/test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js index dc3a8af..3fcc210 100644 --- a/test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js +++ b/test/specs/mainnet/paymasterAPIs/arka/whitelist.spec.js @@ -9,6 +9,7 @@ import { randomChainName, } from '../../../../utils/sharedData_mainnet.js'; import axios from 'axios'; +import helper from '../../../../utils/helper.js'; import message from '../../../../data/messages.json' assert { type: 'json' }; import constant from '../../../../data/constant.json' assert { type: 'json' }; @@ -20,139 +21,219 @@ const header = { describe('Validate the whitelist endpoint of the Arka', function () { it( - 'SMOKE: Validate the Whitelist endpoint with v1 of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the remove 
whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_1); + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid url of Arka on ' + + 'SMOKE: Validate the whitelist endpoint with v1 and userVp parameter of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the whitelist endpoint try { - const 
response = await axios.post( - data.arka_whitelist_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_whitelist_message ); - addContext(test, message.fail_whitelistv1_2); - console.log(message.fail_whitelistv1_2); - } catch (e) { - const error = e.response.data.error; + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.add_whitelist_3, + message.vali_whitelist_messageText + ); - if (error.includes(constant.not_found)) { - addContext(test, message.vali_whitelistv1_2); - console.log(message.vali_whitelistv1_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_2); - } + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelist_1); } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect url of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid url + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv1_3); - console.log(message.fail_whitelistv1_3); + addContext(test, message.fail_whitelistv1_2); + console.log(message.fail_whitelistv1_2); } catch (e) { const error = e.response.data.error; if (error.includes(constant.not_found)) { - addContext(test, message.vali_whitelistv1_3); - console.log(message.vali_whitelistv1_3); + addContext(test, message.vali_whitelistv1_2); + console.log(message.vali_whitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv1_3); + assert.fail(message.fail_whitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const 
response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_4); console.log(message.fail_whitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_whitelistv1_4); console.log(message.vali_whitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_4); @@ -162,35 +243,33 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_44); console.log(message.fail_whitelistv1_44); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_whitelistv1_44); console.log(message.vali_whitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_44); @@ -200,23 +279,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and without address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + // define the payload + const requestData = { + params: [[]], // without address + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_5); console.log(message.fail_whitelistv1_5); @@ -227,7 +305,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_5); console.log(message.vali_whitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_5); @@ -237,22 +315,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter 
and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_6); console.log(message.fail_whitelistv1_6); @@ -263,7 +341,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_6); console.log(message.vali_whitelistv1_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_6); @@ -273,22 +351,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and without apikey of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?chainId=${randomChainId}&useVp=true`; // without apikey + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_7); console.log(message.fail_whitelistv1_7); @@ -299,7 +377,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_7); console.log(message.vali_whitelistv1_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_7); @@ -309,22 +387,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and without chainid of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainid + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_8); 
console.log(message.fail_whitelistv1_8); @@ -335,7 +413,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_8); console.log(message.vali_whitelistv1_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_8); @@ -345,25 +423,133 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Whitelist endpoint with v2 of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) 
|| + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Whitelist endpoint with v1 and without userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // without useVp parameter + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + assert.include(response.data.message, constant.add_whitelist_3); // perform assertions assert.equal(response.status, constant.successCode_1); @@ -371,213 +557,582 @@ describe('Validate the whitelist endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_1); + assert.fail(message.fail_whitelistv1_1); } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid url of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_2); - console.log(message.fail_whitelistv2_2); + addContext(test, message.fail_whitelistv1_2); + console.log(message.fail_whitelistv1_2); } catch (e) { const error = e.response.data.error; if (error.includes(constant.not_found)) { - addContext(test, message.vali_whitelistv2_2); - console.log(message.vali_whitelistv2_2); + addContext(test, message.vali_whitelistv1_2); + console.log(message.vali_whitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_2); + assert.fail(message.fail_whitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect url of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + var
test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_3); - console.log(message.fail_whitelistv2_3); + addContext(test, message.fail_whitelistv1_4); + console.log(message.fail_whitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_whitelistv2_3); - console.log(message.vali_whitelistv2_3); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv1_4); + console.log(message.vali_whitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_3); + assert.fail(message.fail_whitelistv1_4); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_4); - console.log(message.fail_whitelistv2_4); + addContext(test, message.fail_whitelistv1_44); + console.log(message.fail_whitelistv1_44); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_whitelistv2_4); - console.log(message.vali_whitelistv2_4); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv1_44); + console.log(message.vali_whitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_4); + assert.fail(message.fail_whitelistv1_44); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_4); - console.log(message.fail_whitelistv2_4); + addContext(test, message.fail_whitelistv1_5); + console.log(message.fail_whitelistv1_5); } 
catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_whitelistv2_4); - console.log(message.vali_whitelistv2_4); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_5); + console.log(message.vali_whitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_4); + assert.fail(message.fail_whitelistv1_5); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and without address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_5); - console.log(message.fail_whitelistv2_5); + addContext(test, message.fail_whitelistv1_6); + console.log(message.fail_whitelistv1_6); } catch (e) { const error = e.response.data.error; if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_whitelistv2_5); - console.log(message.vali_whitelistv2_5); + addContext(test, message.vali_whitelistv1_6); + console.log(message.vali_whitelistv1_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_5); + assert.fail(message.fail_whitelistv1_6); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and without apikey of Arka on ' + randomChainName + ' Network', async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv1_7); + console.log(message.fail_whitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_7); + console.log(message.vali_whitelistv1_7); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = 
`${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv1_8); + console.log(message.fail_whitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv1_8); + console.log(message.vali_whitelistv1_8); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_8); + } + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + 
helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Whitelist endpoint with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_3); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_2); + console.log(message.fail_whitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv2_2); + console.log(message.vali_whitelistv2_2); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_4); + console.log(message.fail_whitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv2_4); + console.log(message.vali_whitelistv2_4); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect address of Arka on ' + + 
randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_44); + console.log(message.fail_whitelistv2_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv2_44); + console.log(message.vali_whitelistv2_44); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_5); + console.log(message.fail_whitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_5); + console.log(message.vali_whitelistv2_5); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv2_6); console.log(message.fail_whitelistv2_6); @@ -588,7 +1143,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv2_6); console.log(message.vali_whitelistv2_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv2_6); @@ -602,18 +1157,21 @@ describe('Validate the whitelist endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, randomChainId], // without apikey - }, - 
header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv2_7); console.log(message.fail_whitelistv2_7); @@ -624,7 +1182,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv2_7); console.log(message.vali_whitelistv2_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv2_7); @@ -638,29 +1196,32 @@ describe('Validate the whitelist endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv2_8); console.log(message.fail_whitelistv2_8); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_apiKey)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_whitelistv2_8); console.log(message.vali_whitelistv2_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv2_8); @@ -668,4 +1229,298 @@ describe('Validate the whitelist endpoint of the Arka', function () { } } ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + 
console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the whitelist endpoint with v1 and the userVp parameter set to false of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_whitelist_message + ); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.add_whitelist_3, + message.vali_whitelist_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelist_1); + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) {
const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the whitelist endpoint with v1 and userVp parameter with invalid data of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelist_13); + console.log(message.fail_whitelist_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_whitelist_13); + console.log(message.vali_whitelist_13); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelist_13); + } + } + } + ); }); diff --git a/test/specs/testnet/paymasterAPIs/arka/addStake.spec.js b/test/specs/testnet/paymasterAPIs/arka/addStake.spec.js new file mode 100644 index 0000000..1daeabc --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/addStake.spec.js @@ -0,0 +1,456 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; 
+import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the add stake endpoint of the Arka', function () { + it( + 'SMOKE: Validate the add stake endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_addStake_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.addStake_1, + message.vali_addStake_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_1); + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalid_epversion, data.value], // invalid entry point + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_2); + console.log(message.fail_addStake_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_addStake_2); + console.log(message.vali_addStake_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with incorrect entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.incorrect_epversion, data.value], // incorrect entry point + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_3); + console.log(message.fail_addStake_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_addStake_3); + console.log(message.vali_addStake_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_3); + } + } + } + ); 
+ + it( + 'REGRESSION: Validate the add stake endpoint of Arka without entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.value], // without entry point + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_4); + console.log(message.fail_addStake_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_2)) { + addContext(test, message.vali_addStake_4); + console.log(message.vali_addStake_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07, data.invalidValue], // invalid value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_10); + console.log(message.fail_addStake_10); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_2)) { + addContext(test, message.vali_addStake_10); + console.log(message.vali_addStake_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with exceeded value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07, data.exceededValue], // exceeded value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_11); + console.log(message.fail_addStake_11); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_3)) { + addContext(test, message.vali_addStake_11); + console.log(message.vali_addStake_11); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka without value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep07], // without value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_12); + console.log(message.fail_addStake_12); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.addStake_2)) { + addContext(test, message.vali_addStake_12); + 
console.log(message.vali_addStake_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with incorrect apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_5); + console.log(message.fail_addStake_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_addStake_5); + console.log(message.vali_addStake_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_6); + console.log(message.fail_addStake_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_addStake_6); + console.log(message.vali_addStake_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka without apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_7); + console.log(message.fail_addStake_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_addStake_7); + console.log(message.vali_addStake_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka with invalid chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}`; // invalid chainId + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_8); + console.log(message.fail_addStake_8); + } 
catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_addStake_8); + console.log(message.vali_addStake_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the add stake endpoint of Arka without chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_addStake}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainId + + // define the payload + const requestData = { + params: [data.ep07, data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_addStake_9); + console.log(message.fail_addStake_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_addStake_9); + console.log(message.vali_addStake_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_addStake_9); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js index cb63090..cf5db19 100644 --- a/test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js +++ b/test/specs/testnet/paymasterAPIs/arka/checkWhitelist.spec.js @@ -3,7 +3,7 @@ dotenv.config(); // init dotenv import { ethers } from 'ethers'; import { assert } from 'chai'; import addContext from 'mochawesome/addContext.js'; -import Helper from '../../../../utils/helper.js'; +import helper from '../../../../utils/helper.js'; import data from '../../../../data/testData.json' assert { type: 'json' }; import { randomChainId, @@ -21,54 +21,131 @@ const header = { describe('Validate the check whitelist endpoint of the Arka', function () { it( - 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } - // make the random address whitelisted + // wait for the 
response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_1); + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint with v1 and userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; - // wait for the few seconds - Helper.wait(15000); + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; - // check the whitelist status + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); // perform assertions assert.include(response.data.message, constant.check_whitelist_1); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_checkWhitelist_0); + console.log(message.vali_checkWhitelist_0); } catch (e) { console.error(e); const eString = e.toString(); @@ -79,29 +156,137 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const 
url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; - // check the whitelist status + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 and userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: 
[data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); // perform assertions assert.include(response.data.message, constant.check_whitelist_2); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_checkWhitelist_00); + console.log(message.vali_checkWhitelist_00); } catch (e) { console.error(e); const eString = e.toString(); @@ -112,22 +297,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid url of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid url + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_2); console.log(message.fail_checkWhitelistv1_2); @@ -148,22 +333,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect url of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and incorrect url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // incorrect url + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_3); console.log(message.fail_checkWhitelistv1_3); @@ -184,24 +369,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid address of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.invalid_sponsorAddress], // invalid address + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [ - 
data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_4); console.log(message.fail_checkWhitelistv1_4); @@ -222,24 +405,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect address of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.incorrect_sponsorAddress], // incorrect address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_44); console.log(message.fail_checkWhitelistv1_44); @@ -260,23 +441,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without address of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [], // without address + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_5); console.log(message.fail_checkWhitelistv1_5); @@ -297,22 +477,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_6); console.log(message.fail_checkWhitelistv1_6); @@ -333,22 +513,22 @@ describe('Validate the check whitelist endpoint of the Arka', function 
() { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without apikey of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist}?&chainId=${randomChainId}&useVp=true`; // without apikey + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_7); console.log(message.fail_checkWhitelistv1_7); @@ -369,22 +549,22 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without chainid of Arka on ' + + 'REGRESSION: Validate the Check Whitelist endpoint with v1, userVp parameter and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainid + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv1_8); console.log(message.fail_checkWhitelistv1_8); @@ -405,84 +585,130 @@ describe('Validate the check whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; - // make the random address whitelisted + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url1, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); - // perform assertions - assert.equal(response.status, 
constant.successCode_1); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_1); - } + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } - // wait for the few seconds - Helper.wait(15000); + // wait for the response + helper.wait(data.mediumTimeout); + } - // check the whitelist status + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.check_whitelist_1); + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_1); + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } } ); it( - 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v1 of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; - // check the whitelist status + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await 
axios.post(url, requestData, header); // perform assertions - assert.include(response.data.message, constant.check_whitelist_2); + assert.include(response.data.message, constant.check_whitelist_1); // perform assertions assert.equal(response.status, constant.successCode_1); @@ -490,75 +716,810 @@ describe('Validate the check whitelist endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_1); + assert.fail(message.fail_checkWhitelistv1_1); } } ); it( - 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid url of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url1, requestData, header); - addContext(test, message.fail_checkWhitelistv2_2); - console.log(message.fail_checkWhitelistv2_2); + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_checkWhitelistv2_2); - console.log(message.vali_checkWhitelistv2_2); + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_checkWhitelistv2_2); + assert.fail(message.fail_removeWhitelistv1_00); } - } - } - ); - it( - 'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect url of Arka on ' + - randomChainName + - ' Network', - async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // wait for the response + helper.wait(data.mediumTimeout); + } + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_checkwhitelist_v2_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); - addContext(test, message.fail_checkWhitelistv2_3); - console.log(message.fail_checkWhitelistv2_3); + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the 
response + helper.wait(data.mediumTimeout); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_checkWhitelistv2_3); - console.log(message.vali_checkWhitelistv2_3); + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); } else { - console.error(e); + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v1 of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_1); + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_2); + console.log(message.fail_checkWhitelistv1_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_2); + console.log(message.vali_checkWhitelistv1_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + 
assert.fail(message.fail_checkWhitelistv1_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_3); + console.log(message.fail_checkWhitelistv1_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv1_3); + console.log(message.vali_checkWhitelistv1_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalid_sponsorAddress], // invalid address + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_4); + console.log(message.fail_checkWhitelistv1_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_4); + console.log(message.vali_checkWhitelistv1_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.incorrect_sponsorAddress], // incorrect address + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_44); + console.log(message.fail_checkWhitelistv1_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_checkWhitelistv1_44); + console.log(message.vali_checkWhitelistv1_44); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without address 
+ }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_5); + console.log(message.fail_checkWhitelistv1_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_5); + console.log(message.vali_checkWhitelistv1_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_6); + console.log(message.fail_checkWhitelistv1_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_6); + console.log(message.vali_checkWhitelistv1_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_7); + console.log(message.fail_checkWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_checkWhitelistv1_7); + console.log(message.vali_checkWhitelistv1_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv1_8); + console.log(message.fail_checkWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, 
message.vali_checkWhitelistv1_8); + console.log(message.vali_checkWhitelistv1_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv1_8); + } + } + } + ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = 
`${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_1); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_1); + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + 
console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Check Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.check_whitelist_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv2_2); + console.log(message.fail_checkWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv2_2); + console.log(message.vali_checkWhitelistv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Check Whitelist endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_checkwhitelist_v2_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelistv2_3); + console.log(message.fail_checkWhitelistv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_checkWhitelistv2_3); + console.log(message.vali_checkWhitelistv2_3); + } else { + console.error(e); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_checkWhitelistv2_3); @@ -574,18 +1535,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { async function () { var test = this; + // define the url + const url = 
`${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalid_sponsorAddress], // invalid address + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_4); console.log(message.fail_checkWhitelistv2_4); @@ -612,18 +1572,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { async function () { var test = this; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.incorrect_sponsorAddress], // incorrect address + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_4); console.log(message.fail_checkWhitelistv2_4); @@ -650,17 +1609,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { async function () { var test = this; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without address + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_5); console.log(message.fail_checkWhitelistv2_5); @@ -689,14 +1648,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_6); console.log(message.fail_checkWhitelistv2_6); @@ -725,14 +1687,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, 
requestData, header); addContext(test, message.fail_checkWhitelistv2_7); console.log(message.fail_checkWhitelistv2_7); @@ -761,14 +1726,17 @@ describe('Validate the check whitelist endpoint of the Arka', function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_checkwhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [addresses], + }; + + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_checkwhitelist_v2, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_checkWhitelistv2_8); console.log(message.fail_checkWhitelistv2_8); @@ -787,4 +1755,292 @@ describe('Validate the check whitelist endpoint of the Arka', function () { } } ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + 
error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the check whitelist endpoint with v1 and userVp parameter as a false of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_checkWhitelist_message + ); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.add_whitelist_3, + message.vali_checkWhitelist_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_1); + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + 
console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the check whitelist endpoint with v1 and userVp parameter with invalid data of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_checkwhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; + + // define the payload + const requestData = { + params: [data.arka_sponsorAddress], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_checkWhitelist_13); + console.log(message.fail_checkWhitelist_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_checkWhitelist_13); + console.log(message.vali_checkWhitelist_13); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_checkWhitelist_13); + } + } + } + ); }); diff --git a/test/specs/testnet/paymasterAPIs/arka/deployVp.spec.js b/test/specs/testnet/paymasterAPIs/arka/deployVp.spec.js new file mode 100644 index 0000000..219ae4f --- /dev/null +++ b/test/specs/testnet/paymasterAPIs/arka/deployVp.spec.js @@ -0,0 +1,357 @@ +import * as dotenv from 'dotenv'; +dotenv.config(); // init dotenv +import { ethers } from 'ethers'; +import { assert } from 'chai'; +import addContext from 'mochawesome/addContext.js'; +import data from '../../../../data/testData.json' assert { type: 'json' }; +import { + randomChainId, + randomChainName, +} from '../../../../utils/sharedData_testnet.js'; +import axios from 'axios'; +import message from '../../../../data/messages.json' assert { type: 'json' }; +import constant from '../../../../data/constant.json' assert { type: 'json' }; + +// define header with valid details +const header = { + Accept: 'application/json', + 'Content-Type': 'application/json', +}; + +describe('Validate the deploy-vp endpoint of the Arka', function () { + it( + 'SMOKE: Validate the deploy-vp endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.ep06, data.ep07], + }; + + var test = this; + // 
validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the verifyingPaymaster parameter in the response + assert.isNotEmpty( + response.data.verifyingPaymaster, + message.vali_deployVerifyingPaymaster_verifyingPaymaster + ); + + // validate the txHash parameter in the response + assert.isNotEmpty( + response.data.txHash, + message.vali_deployVerifyingPaymaster_txHash + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_1)) { + addContext(test, message.vali_deployVerifyingPaymaster_1); + console.log(message.vali_deployVerifyingPaymaster_1); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_1); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with invalid entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalid_epversion], // invalid entry point + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_2); + console.log(message.fail_deployVerifyingPaymaster_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_deployVerifyingPaymaster_2); + console.log(message.vali_deployVerifyingPaymaster_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with incorrect entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.incorrect_epversion], // incorrect entry point + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_3); + console.log(message.fail_deployVerifyingPaymaster_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_deployVerifyingPaymaster_3); + console.log(message.vali_deployVerifyingPaymaster_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka without entry points on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without entry point + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, 
header); + + addContext(test, message.fail_deployVerifyingPaymaster_4); + console.log(message.fail_deployVerifyingPaymaster_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_2)) { + addContext(test, message.vali_deployVerifyingPaymaster_4); + console.log(message.vali_deployVerifyingPaymaster_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with incorrect apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_5); + console.log(message.fail_deployVerifyingPaymaster_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deployVerifyingPaymaster_5); + console.log(message.vali_deployVerifyingPaymaster_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with invalid apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_6); + console.log(message.fail_deployVerifyingPaymaster_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deployVerifyingPaymaster_6); + console.log(message.vali_deployVerifyingPaymaster_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka without apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_7); + console.log(message.fail_deployVerifyingPaymaster_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deployVerifyingPaymaster_7); + console.log(message.vali_deployVerifyingPaymaster_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_7); 
+ } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka with invalid chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}`; // invalid chainId + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_8); + console.log(message.fail_deployVerifyingPaymaster_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_network_3)) { + addContext(test, message.vali_deployVerifyingPaymaster_8); + console.log(message.vali_deployVerifyingPaymaster_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the deploy-vp endpoint of Arka without chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deployVerifyingPaymaster}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainId + + // define the payload + const requestData = { + params: [data.ep07], + }; + + var test = this; + // validate the deployVerifyingPaymaster endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deployVerifyingPaymaster_9); + console.log(message.fail_deployVerifyingPaymaster_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deployVerifyingPaymaster_9); + console.log(message.vali_deployVerifyingPaymaster_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deployVerifyingPaymaster_9); + } + } + } + ); +}); diff --git a/test/specs/testnet/paymasterAPIs/arka/deposit.spec.js b/test/specs/testnet/paymasterAPIs/arka/deposit.spec.js index 9c91679..f0697b3 100644 --- a/test/specs/testnet/paymasterAPIs/arka/deposit.spec.js +++ b/test/specs/testnet/paymasterAPIs/arka/deposit.spec.js @@ -23,17 +23,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.value], + }; - // make the random address whitelisted + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); // perform assertions assert.include(response.data.message, constant.deposit_2); @@ -54,16 +55,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the addStake endpoint try { - const response = 
await axios.post( - data.arka_deposit_invalid, // invalid url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_2); console.log(message.fail_depositv1_2); @@ -88,16 +91,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit_incorrect, // incorrect url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_3); console.log(message.fail_depositv1_3); @@ -117,25 +122,23 @@ describe('Validate the deposit endpoint of the Arka', function () { } ); - it.only( + it( 'REGRESSION: Validate the deposit endpoint with v1 and invalid value of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [ - data.invalidValue, // invalid value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_4); console.log(message.fail_depositv1_4); @@ -155,25 +158,23 @@ describe('Validate the deposit endpoint of the Arka', function () { } ); - it.only( + it( 'REGRESSION: Validate the deposit endpoint with v1 and exceeded value of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [ - data.exceededValue, // exceeded value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_44); console.log(message.fail_depositv1_44); @@ -193,24 +194,23 @@ describe('Validate the deposit endpoint of the Arka', function () { } ); - it.only( + it( 'REGRESSION: Validate the deposit endpoint with v1 and without value of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [ - randomChainId, // without value - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, 
header); addContext(test, message.fail_depositv1_5); console.log(message.fail_depositv1_5); @@ -235,16 +235,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_6); console.log(message.fail_depositv1_6); @@ -269,16 +271,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?chainId=${randomChainId}`; // without apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_7); console.log(message.fail_depositv1_7); @@ -303,16 +307,18 @@ describe('Validate the deposit endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint try { - const response = await axios.post( - data.arka_deposit, - { - params: [data.value, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_depositv1_8); console.log(message.fail_depositv1_8); @@ -333,24 +339,32 @@ describe('Validate the deposit endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the deposit endpoint with v2 of Arka on the ' + + 'SMOKE: Validate the deposit endpoint with v1 and userVp parameter of Arka on ' + randomChainName + - ' network', + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.value], + }; - // make the random address whitelisted + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url, requestData, header); - // perform assertions - assert.include(response.data.message, constant.deposit_2); + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText + ); // perform assertions assert.equal(response.status, constant.successCode_1); @@ 
-358,289 +372,1091 @@ describe('Validate the deposit endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_1); + assert.fail(message.fail_deposit1_1); } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and invalid url of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and invalid value on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2_invalid, // invalid url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_2); - console.log(message.fail_depositv2_2); + addContext(test, message.fail_deposit_10); + console.log(message.fail_deposit_10); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_depositv2_2); - console.log(message.vali_depositv2_2); + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_10); + console.log(message.vali_deposit_10); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_2); + assert.fail(message.fail_deposit_10); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and incorrect url of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and exceeded value on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2_incorrect, // incorrect url - { - params: [data.value, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_3); - console.log(message.fail_depositv2_3); + addContext(test, message.fail_deposit_11); + console.log(message.fail_deposit_11); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_depositv2_3); - console.log(message.vali_depositv2_3); + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_deposit_11); + console.log(message.vali_deposit_11); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_3); + assert.fail(message.fail_deposit_11); } } } ); - it.only( - 'REGRESSION: Validate the deposit endpoint with v2 and invalid value of Arka on ' + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and without value on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = 
`${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [ - data.invalidValue, // invalid value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_4); - console.log(message.fail_depositv2_4); + addContext(test, message.fail_deposit_12); + console.log(message.fail_deposit_12); } catch (e) { const error = e.response.data.error; if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_depositv2_4); - console.log(message.vali_depositv2_4); + addContext(test, message.vali_deposit_12); + console.log(message.vali_deposit_12); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_4); + assert.fail(message.fail_deposit_12); } } } ); - it.only( - 'REGRESSION: Validate the deposit endpoint with v2 and exceeded value of Arka on ' + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and incorrect apikey on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}&useVp=true`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [ - data.exceededValue, // exceeded value - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_4); - console.log(message.fail_depositv2_4); + addContext(test, message.fail_deposit_5); + console.log(message.fail_deposit_5); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.deposit_3)) { - addContext(test, message.vali_depositv2_4); - console.log(message.vali_depositv2_4); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_5); + console.log(message.vali_deposit_5); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_4); + assert.fail(message.fail_deposit_5); } } } ); - it.only( - 'REGRESSION: Validate the deposit endpoint with v2 and without value of Arka on ' + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and invalid apikey on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [ - randomChainId, // without value - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_5); - console.log(message.fail_depositv2_5); + addContext(test, message.fail_deposit_6); + console.log(message.fail_deposit_6); } catch (e) { 
const error = e.response.data.error; if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_depositv2_5); - console.log(message.vali_depositv2_5); + addContext(test, message.vali_deposit_6); + console.log(message.vali_deposit_6); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_5); + assert.fail(message.fail_deposit_6); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and without apikey on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?chainId=${randomChainId}&useVp=true`; // without apikey + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_6); - console.log(message.fail_depositv2_6); + addContext(test, message.fail_deposit_7); + console.log(message.fail_deposit_7); } catch (e) { const error = e.response.data.error; if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_depositv2_6); - console.log(message.vali_depositv2_6); + addContext(test, message.vali_deposit_7); + console.log(message.vali_deposit_7); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_6); + assert.fail(message.fail_deposit_7); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and without apikey of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and invalid chainId on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}&useVp=true`; // invalid chainId + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_depositv2_7); - console.log(message.fail_depositv2_7); + addContext(test, message.fail_deposit_8); + console.log(message.fail_deposit_8); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_depositv2_7); - console.log(message.vali_depositv2_7); + if (error.includes(constant.invalid_network_3)) { + addContext(test, message.vali_deposit_8); + console.log(message.vali_deposit_8); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_7); + assert.fail(message.fail_deposit_8); } } } ); it( - 'REGRESSION: Validate the deposit endpoint with v2 and without chainid of Arka on ' + + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter and without chainId on ' + randomChainName + ' Network', async function () { + // define the url + const url = 
`${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainId + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_9); + console.log(message.fail_deposit_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_9); + console.log(message.vali_deposit_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter with false value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; // false value + + // define the payload + const requestData = { + params: [data.value], + }; + var test = this; + // validate the deposit endpoint try { - const response = await axios.post( - data.arka_deposit_v2, - { - params: [data.value, process.env.API_KEY], // without chainid - }, - header + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText ); - addContext(test, message.fail_depositv2_8); - console.log(message.fail_depositv2_8); + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit2_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v1, userVp parameter with invalid data on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; // invalid data in userVp parameter + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_13); + console.log(message.fail_deposit_13); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_depositv2_8); - console.log(message.vali_depositv2_8); + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_deposit_13); + console.log(message.vali_deposit_13); } else { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_depositv2_8); + assert.fail(message.fail_deposit_13); + } + } + } + ); + + it( + 'SMOKE: Validate the deposit endpoint with v2 of Arka on the ' + + randomChainName + + ' network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await 
axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.deposit_2); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_2); + console.log(message.fail_depositv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv2_2); + console.log(message.vali_depositv2_2); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and incorrect url of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2_incorrect}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // incorrect url + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_3); + console.log(message.fail_depositv2_3); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_depositv2_3); + console.log(message.vali_depositv2_3); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_3); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid value of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_4); + console.log(message.fail_depositv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_depositv2_4); + console.log(message.vali_depositv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and exceeded value of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + + var 
test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_4); + console.log(message.fail_depositv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_depositv2_4); + console.log(message.vali_depositv2_4); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without value of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_5); + console.log(message.fail_depositv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_5); + console.log(message.vali_depositv2_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_6); + console.log(message.fail_depositv2_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_6); + console.log(message.vali_depositv2_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_7); + console.log(message.fail_depositv2_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_7); + console.log(message.vali_depositv2_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint with v2 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = 
`${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the addStake endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_depositv2_8); + console.log(message.fail_depositv2_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_depositv2_8); + console.log(message.vali_depositv2_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_depositv2_8); + } + } + } + ); + + it( + 'SMOKE: Validate the deposit endpoint with v2 and userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit1_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and invalid value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.invalidValue], // invalid value + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_10); + console.log(message.fail_deposit_10); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_10); + console.log(message.vali_deposit_10); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_10); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and exceeded value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [data.exceededValue], // exceeded value + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_11); + console.log(message.fail_deposit_11); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deposit_3)) { + addContext(test, message.vali_deposit_11); + console.log(message.vali_deposit_11); + } else { + console.error(e); + 
const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_11); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and without value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [], // without value + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_12); + console.log(message.fail_deposit_12); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_12); + console.log(message.vali_deposit_12); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_12); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and incorrect apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.DATA_API_KEY}&chainId=${randomChainId}&useVp=true`; // incorrect apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_5); + console.log(message.fail_deposit_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_5); + console.log(message.vali_deposit_5); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and invalid apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_6); + console.log(message.fail_deposit_6); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_6); + console.log(message.vali_deposit_6); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_6); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and without apikey on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?chainId=${randomChainId}&useVp=true`; // without apikey + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_7); + console.log(message.fail_deposit_7); + } catch 
(e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_deposit_7); + console.log(message.vali_deposit_7); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and invalid chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${data.invalid_chainId}&useVp=true`; // invalid chainId + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_8); + console.log(message.fail_deposit_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_network_3)) { + addContext(test, message.vali_deposit_8); + console.log(message.vali_deposit_8); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_8); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter and without chainId on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainId + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_9); + console.log(message.fail_deposit_9); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_data)) { + addContext(test, message.vali_deposit_9); + console.log(message.vali_deposit_9); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_9); + } + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter with false value on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; // false value + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty(response.data.message, message.vali_deposit_message); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.deposit_2, + message.vali_deposit_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit2_1); + } + } + ); + + it( + 'REGRESSION: Validate the deposit endpoint of Arka with v2, userVp parameter with invalid data on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = 
`${data.arka_deposit_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; // invalid data in userVp parameter + + // define the payload + const requestData = { + params: [data.value], + }; + + var test = this; + // validate the deposit endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_deposit_13); + console.log(message.fail_deposit_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_deposit_13); + console.log(message.vali_deposit_13); + } else { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_deposit_13); } } } diff --git a/test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js index d9f0dfb..56763f6 100644 --- a/test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js +++ b/test/specs/testnet/paymasterAPIs/arka/getAllWhitelist.spec.js @@ -34,7 +34,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_whitelist_v2, { - params: [addresses, randomChainId, process.env.API_KEY], + params: [addresses, randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -59,7 +59,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_v2, { - params: ['1', randomChainId, process.env.API_KEY], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -91,7 +91,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_v2, { - params: ['1', randomChainId, process.env.API_KEY_ARKA], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -127,7 +127,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_invalid, // invalid url { - params: ['1', randomChainId, process.env.API_KEY], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -163,7 +163,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist_incorrect, // incorrect url { - params: ['1', randomChainId, process.env.API_KEY], + params: ['1', randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -271,7 +271,7 @@ describe('Validate the get all whitelist endpoint of the Arka', function () { const response = await axios.post( data.arka_getAllWhitelist, { - params: ['1', process.env.API_KEY], // without chainid + params: ['1', process.env.ARKA_API_KEY_PROD], // without chainid }, header ); diff --git a/test/specs/testnet/paymasterAPIs/arka/metadata.spec.js b/test/specs/testnet/paymasterAPIs/arka/metadata.spec.js index 3d4c93d..97568cc 100644 --- a/test/specs/testnet/paymasterAPIs/arka/metadata.spec.js +++ b/test/specs/testnet/paymasterAPIs/arka/metadata.spec.js @@ -30,7 +30,10 @@ describe('Validate the metadata endpoint of the Arka', function () { const response = await axios.get( data.arka_metadata, { - params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + params: { + chainId: randomChainId, + apiKey: process.env.ARKA_API_KEY_PROD, + }, }, header ); @@ -95,7 +98,10 @@ describe('Validate the metadata endpoint of the Arka', function () { 
const response = await axios.post( data.arka_metadata_invalid, // invalid url { - params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + params: { + chainId: randomChainId, + apiKey: process.env.ARKA_API_KEY_PROD, + }, }, header ); @@ -131,7 +137,10 @@ describe('Validate the metadata endpoint of the Arka', function () { const response = await axios.post( data.arka_metadata_incorrect, // incorrect url { - params: { chainId: randomChainId, apiKey: process.env.API_KEY }, + params: { + chainId: randomChainId, + apiKey: process.env.ARKA_API_KEY_PROD, + }, }, header ); @@ -239,7 +248,7 @@ describe('Validate the metadata endpoint of the Arka', function () { const response = await axios.post( data.arka_metadata, { - params: { apiKey: process.env.API_KEY }, // without chainid + params: { apiKey: process.env.ARKA_API_KEY_PROD }, // without chainid }, header ); diff --git a/test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js b/test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js index 4c9f039..c2ee5d9 100644 --- a/test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js +++ b/test/specs/testnet/paymasterAPIs/arka/pimlicoAddress.spec.js @@ -34,7 +34,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { const response = await axios.post( data.arka_whitelist, { - params: [addresses, randomChainId, process.env.API_KEY], + params: [addresses, randomChainId, process.env.ARKA_API_KEY_PROD], }, header ); @@ -63,7 +63,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -97,7 +97,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -138,7 +138,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -177,7 +177,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { data.invalid_sponsorAddress, // invalid address { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -216,7 +216,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { data.incorrect_sponsorAddress, // incorrect address { token: data.usdc_token }, randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -254,7 +254,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { params: [ { token: data.usdc_token }, randomChainId, // without address - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -371,7 +371,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { params: [ addresses, { token: data.usdc_token }, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], // without chainid }, header @@ -412,7 +412,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, { token: data.invalid_usdc_token }, // invalid token randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, header @@ -453,7 +453,7 @@ describe('Validate the pimlico address endpoint of the Arka', function () { addresses, // without token randomChainId, - process.env.API_KEY, + process.env.ARKA_API_KEY_PROD, ], }, 
header diff --git a/test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js index b536b08..68becf4 100644 --- a/test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js +++ b/test/specs/testnet/paymasterAPIs/arka/removeWhitelist.spec.js @@ -3,13 +3,13 @@ dotenv.config(); // init dotenv import { ethers } from 'ethers'; import { assert } from 'chai'; import addContext from 'mochawesome/addContext.js'; -import Helper from '../../../../utils/helper.js'; import data from '../../../../data/testData.json' assert { type: 'json' }; import { randomChainId, randomChainName, } from '../../../../utils/sharedData_testnet.js'; import axios from 'axios'; +import helper from '../../../../utils/helper.js'; import message from '../../../../data/messages.json' assert { type: 'json' }; import constant from '../../../../data/constant.json' assert { type: 'json' }; @@ -21,202 +21,213 @@ const header = { describe('Validate the remove whitelist endpoint of the Arka', function () { it( - 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v1 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; - // make the random address whitelisted + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url1, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); - // perform assertions - assert.equal(response.status, constant.successCode_1); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_1); - } + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } - // wait for the few seconds - Helper.wait(15000); + // wait for the response + helper.wait(data.mediumTimeout); + } - // validate the remove whitelist endpoint + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.remove_whitelist_2); + 
addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); - // perform assertions - assert.equal(response.status, constant.successCode_1); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_1); - } - } - ); + const error = e.response.data.error; - it( - 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v1 of Arka on ' + - randomChainName + - ' Network', - async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } - // validate remove whitelist endpoint + // validate the whitelist address endpoint with v2 and without userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); - addContext(test, message.fail_removeWhitelistv1_0); - console.log(message.fail_removeWhitelistv1_0); + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelistv1_1); - console.log(message.vali_removeWhitelistv1_1); + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_0); + assert.fail(message.fail_whitelistv2_0); } + + // wait for the response + helper.wait(data.mediumTimeout); } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid url of Arka on ' + + 'SMOKE: Validate the remove whitelist endpoint with v1 and userVp parameter of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_removeWhitelist_message ); - addContext(test, message.fail_removeWhitelistv1_2); - console.log(message.fail_removeWhitelistv1_2); - } catch (e) { - const 
error = e.response.data.error; + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.remove_whitelist_3, + message.vali_removeWhitelist_messageText + ); - if (error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv1_2); - console.log(message.vali_removeWhitelistv1_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_2); - } + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_1); } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect url of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid url + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv1_3); - console.log(message.fail_removeWhitelistv1_3); + addContext(test, message.fail_removeWhitelistv1_2); + console.log(message.fail_removeWhitelistv1_2); } catch (e) { const error = e.response.data.error; if (error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv1_3); - console.log(message.vali_removeWhitelistv1_3); + addContext(test, message.vali_removeWhitelistv1_2); + console.log(message.vali_removeWhitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv1_3); + assert.fail(message.fail_removeWhitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_4); console.log(message.fail_removeWhitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { 
addContext(test, message.vali_removeWhitelistv1_4); console.log(message.vali_removeWhitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_4); @@ -226,35 +237,33 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and incorrect address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_44); console.log(message.fail_removeWhitelistv1_44); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_removeWhitelistv1_44); console.log(message.vali_removeWhitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_44); @@ -264,23 +273,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_5); console.log(message.fail_removeWhitelistv1_5); @@ -291,7 +299,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_5); console.log(message.vali_removeWhitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_5); @@ -301,22 +309,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - 
const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_6); console.log(message.fail_removeWhitelistv1_6); @@ -327,7 +335,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_6); console.log(message.vali_removeWhitelistv1_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_6); @@ -337,22 +345,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?chainId=${randomChainId}&useVp=true`; // without apikey + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv1_7); console.log(message.fail_removeWhitelistv1_7); @@ -363,7 +371,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_7); console.log(message.vali_removeWhitelistv1_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_7); @@ -373,22 +381,22 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v1 and without chainid of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1, userVp parameter and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainid + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, 
requestData, header); addContext(test, message.fail_removeWhitelistv1_8); console.log(message.fail_removeWhitelistv1_8); @@ -399,7 +407,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv1_8); console.log(message.vali_removeWhitelistv1_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv1_8); @@ -409,51 +417,127 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Remove Whitelist endpoint which was already whitelisted with v2 of Arka on ' + + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } - // make the random address whitelisted + // validate the whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { 
- console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv2_1); + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } + } + ); - // wait for the few seconds - Helper.wait(15000); + it( + 'SMOKE: Validate the remove whitelist endpoint with v1 and without userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // without userVp parameter + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY_ARKA], - }, - header - ); + const response = await axios.post(url, requestData, header); // perform assertions - assert.include(response.data.message, constant.remove_whitelist_2); + assert.include(response.data.message, constant.remove_whitelist_3); // perform assertions assert.equal(response.status, constant.successCode_1); @@ -461,214 +545,537 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_1); + assert.fail(message.fail_removeWhitelistv1_1); } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint which was not whitelisted with v2 of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; - // validate remove whitelist endpoint + // define the url + const url = `${data.arka_removeWhitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_2); - console.log(message.fail_removeWhitelistv2_2); + addContext(test, message.fail_removeWhitelistv1_2); + console.log(message.fail_removeWhitelistv1_2); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.remove_whitelist_1)) { - addContext(test, message.vali_removeWhitelistv2_1); - console.log(message.vali_removeWhitelistv2_1); + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv1_2); + console.log(message.vali_removeWhitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); -
assert.fail(message.fail_removeWhitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid url of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_2); - console.log(message.fail_removeWhitelistv2_2); + addContext(test, message.fail_removeWhitelistv1_4); + console.log(message.fail_removeWhitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv2_2); - console.log(message.vali_removeWhitelistv2_2); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv1_4); + console.log(message.vali_removeWhitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_2); + assert.fail(message.fail_removeWhitelistv1_4); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect url of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_3); - console.log(message.fail_removeWhitelistv2_3); + addContext(test, message.fail_removeWhitelistv1_44); + console.log(message.fail_removeWhitelistv1_44); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, message.vali_removeWhitelistv2_3); - console.log(message.vali_removeWhitelistv2_3); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv1_44); + console.log(message.vali_removeWhitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_3); + assert.fail(message.fail_removeWhitelistv1_44); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and 
invalid address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_4); - console.log(message.fail_removeWhitelistv2_4); + addContext(test, message.fail_removeWhitelistv1_5); + console.log(message.fail_removeWhitelistv1_5); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_removeWhitelistv2_4); - console.log(message.vali_removeWhitelistv2_4); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_5); + console.log(message.vali_removeWhitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_4); + assert.fail(message.fail_removeWhitelistv1_5); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and incorrect address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_removeWhitelistv2_44); - console.log(message.fail_removeWhitelistv2_44); + addContext(test, message.fail_removeWhitelistv1_6); + console.log(message.fail_removeWhitelistv1_6); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_removeWhitelistv2_44); - console.log(message.vali_removeWhitelistv2_44); - } else { - console.error(e); - const eString = e.toString(); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_6); + console.log(message.vali_removeWhitelistv1_6); + } else { + console.error(e.response.data.error); + const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_removeWhitelistv2_44); + assert.fail(message.fail_removeWhitelistv1_6); } } } ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without address of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and without apikey of Arka on ' + randomChainName + ' Network', 
async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv1_7); + console.log(message.fail_removeWhitelistv1_7); + } catch (e) { + const error = e.response.data.error; + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_removeWhitelistv1_7); + console.log(message.vali_removeWhitelistv1_7); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv1_8); + console.log(message.fail_removeWhitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv1_8); + console.log(message.vali_removeWhitelistv1_8); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_8); + } + } + } + ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // 
validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the remove Whitelist endpoint with v2 of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.remove_whitelist_3); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_removeWhitelist_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv2_2); + console.log(message.fail_removeWhitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_removeWhitelistv2_2); + console.log(message.vali_removeWhitelistv2_2); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_2); + } + 
} + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv2_4); + console.log(message.fail_removeWhitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv2_4); + console.log(message.vali_removeWhitelistv2_4); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelistv2_44); + console.log(message.fail_removeWhitelistv2_44); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_removeWhitelistv2_44); + console.log(message.vali_removeWhitelistv2_44); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_5); console.log(message.fail_removeWhitelistv2_5); @@ -679,7 +1086,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv2_5); console.log(message.vali_removeWhitelistv2_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_5); @@ -689,22 +1096,25 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = 
[randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_6); console.log(message.fail_removeWhitelistv2_6); @@ -715,7 +1125,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv2_6); console.log(message.vali_removeWhitelistv2_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_6); @@ -725,22 +1135,25 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without apikey of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_7); console.log(message.fail_removeWhitelistv2_7); @@ -751,7 +1164,7 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { addContext(test, message.vali_removeWhitelistv2_7); console.log(message.vali_removeWhitelistv2_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_7); @@ -761,33 +1174,36 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Remove Whitelist endpoint with v2 and without chainid of Arka on ' + + 'REGRESSION: Validate the remove Whitelist endpoint with v2 and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the remove whitelist endpoint try { - const response = await axios.post( - data.arka_removeWhitelist_v2, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_removeWhitelistv2_8); console.log(message.fail_removeWhitelistv2_8); } catch (e) { const error = e.response.data.error; - if 
(error.includes(constant.invalid_apiKey)) { + if (error.includes(constant.invalid_data)) { addContext(test, message.vali_removeWhitelistv2_8); console.log(message.vali_removeWhitelistv2_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_removeWhitelistv2_8); @@ -795,4 +1211,286 @@ describe('Validate the remove whitelist endpoint of the Arka', function () { } } ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) || + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the remove whitelist endpoint with v1 and userVp parameter as a false of Arka on ' + + randomChainName + + ' 
Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_removeWhitelist_message + ); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.remove_whitelist_3, + message.vali_removeWhitelist_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_1); + } + } + ); + + it( + 'PRECONDITION: Validate the whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_addWhitelist_1); + console.log(message.vali_addWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_5)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_2)) { + addContext(test, message.vali_whitelistv1_1); + console.log(message.vali_whitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.add_whitelist_2) 
|| + error.includes(constant.add_whitelist_5) + ) { + addContext(test, message.vali_whitelistv2_1); + console.log(message.vali_whitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the remove whitelist endpoint with v1 and userVp parameter with invalid data of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_removeWhitelist_13); + console.log(message.fail_removeWhitelist_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_removeWhitelist_13); + console.log(message.vali_removeWhitelist_13); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelist_13); + } + } + } + ); }); diff --git a/test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js b/test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js index 472fc44..e6375ec 100644 --- a/test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js +++ b/test/specs/testnet/paymasterAPIs/arka/whitelist.spec.js @@ -9,6 +9,7 @@ import { randomChainName, } from '../../../../utils/sharedData_testnet.js'; import axios from 'axios'; +import helper from '../../../../utils/helper.js'; import message from '../../../../data/messages.json' assert { type: 'json' }; import constant from '../../../../data/constant.json' assert { type: 'json' }; @@ -20,139 +21,219 @@ const header = { describe('Validate the whitelist endpoint of the Arka', function () { it( - 'SMOKE: Validate the Whitelist endpoint with v1 of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + 
console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); // perform assertions - assert.equal(response.status, constant.successCode_1); + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); } catch (e) { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_1); + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid url of Arka on ' + + 'SMOKE: Validate the whitelist endpoint with v1 and userVp parameter of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_whitelist_message ); - addContext(test, message.fail_whitelistv1_2); - console.log(message.fail_whitelistv1_2); - } catch (e) { - const error = e.response.data.error; + // validate the text of the message parameter in the response + assert.include( + response.data.message, 
+ constant.add_whitelist_3, + message.vali_whitelist_messageText + ); - if (error.includes(constant.not_found)) { - addContext(test, message.vali_whitelistv1_2); - console.log(message.vali_whitelistv1_2); - } else { - console.error(e); - const eString = e.toString(); - addContext(test, eString); - assert.fail(message.fail_whitelistv1_2); - } + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelist_1); } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect url of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid url + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv1_3); - console.log(message.fail_whitelistv1_3); + addContext(test, message.fail_whitelistv1_2); + console.log(message.fail_whitelistv1_2); } catch (e) { const error = e.response.data.error; if (error.includes(constant.not_found)) { - addContext(test, message.vali_whitelistv1_3); - console.log(message.vali_whitelistv1_3); + addContext(test, message.vali_whitelistv1_2); + console.log(message.vali_whitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv1_3); + assert.fail(message.fail_whitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_4); console.log(message.fail_whitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_whitelistv1_4); console.log(message.vali_whitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_4); @@ -162,35 +243,33 @@ describe('Validate the whitelist endpoint of 
the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_44); console.log(message.fail_whitelistv1_44); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_whitelistv1_44); console.log(message.vali_whitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_44); @@ -200,23 +279,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and without address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + // define the payload + const requestData = { + params: [[]], // without address + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_5); console.log(message.fail_whitelistv1_5); @@ -227,7 +305,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_5); console.log(message.vali_whitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_5); @@ -237,22 +315,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; // invalid apikey + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, 
randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_6); console.log(message.fail_whitelistv1_6); @@ -263,7 +341,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_6); console.log(message.vali_whitelistv1_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_6); @@ -273,22 +351,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and without apikey of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and without apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?chainId=${randomChainId}&useVp=true`; // without apikey + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_7); console.log(message.fail_whitelistv1_7); @@ -299,7 +377,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_7); console.log(message.vali_whitelistv1_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_7); @@ -309,22 +387,22 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'REGRESSION: Validate the Whitelist endpoint with v1 and without chainid of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1, userVp parameter and without chainid of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&useVp=true`; // without chainid + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv1_8); console.log(message.fail_whitelistv1_8); @@ -335,7 +413,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv1_8); console.log(message.vali_whitelistv1_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv1_8); @@ -345,25 +423,133 @@ describe('Validate the whitelist endpoint of the Arka', function () { ); it( - 'SMOKE: Validate the Whitelist endpoint with v2 of Arka on ' + + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' 
+ randomChainName + ' Network', async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Whitelist endpoint with v1 and without userVp parameter of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = 
`${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); // perform assertions - assert.include(response.data.message, constant.add_whitelist_1); + assert.include(response.data.message, constant.add_whitelist_3); // perform assertions assert.equal(response.status, constant.successCode_1); @@ -371,213 +557,582 @@ describe('Validate the whitelist endpoint of the Arka', function () { console.error(e); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_1); + assert.fail(message.fail_whitelistv1_1); } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid url of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid url of Arka on ' + randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2_invalid, // invalid url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_2); - console.log(message.fail_whitelistv2_2); + addContext(test, message.fail_whitelistv1_2); + console.log(message.fail_whitelistv1_2); } catch (e) { const error = e.response.data.error; if (error.includes(constant.not_found)) { - addContext(test, message.vali_whitelistv2_2); - console.log(message.vali_whitelistv2_2); + addContext(test, message.vali_whitelistv1_2); + console.log(message.vali_whitelistv1_2); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_2); + assert.fail(message.fail_whitelistv1_2); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect url of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; - const randomAddress = ethers.Wallet.createRandom(); - const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2_incorrect, // incorrect url - { - params: [addresses, randomChainId, process.env.API_KEY], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_3); - console.log(message.fail_whitelistv2_3); + addContext(test, message.fail_whitelistv1_4); + console.log(message.fail_whitelistv1_4); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.not_found)) { - addContext(test, 
message.vali_whitelistv2_3); - console.log(message.vali_whitelistv2_3); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv1_4); + console.log(message.vali_whitelistv1_4); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_3); + assert.fail(message.fail_whitelistv1_4); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and incorrect address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [ - data.invalid_sponsorAddress, // invalid address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_4); - console.log(message.fail_whitelistv2_4); + addContext(test, message.fail_whitelistv1_44); + console.log(message.fail_whitelistv1_44); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_whitelistv2_4); - console.log(message.vali_whitelistv2_4); + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv1_44); + console.log(message.vali_whitelistv1_44); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_4); + assert.fail(message.fail_whitelistv1_44); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and without address of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [ - data.incorrect_sponsorAddress, // incorrect address - randomChainId, - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_4); - console.log(message.fail_whitelistv2_4); + addContext(test, message.fail_whitelistv1_5); + console.log(message.fail_whitelistv1_5); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_data)) { - addContext(test, message.vali_whitelistv2_4); - console.log(message.vali_whitelistv2_4); + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_5); + console.log(message.vali_whitelistv1_5); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_4); + assert.fail(message.fail_whitelistv1_5); } } } ); it( - 'REGRESSION: Validate the 
Whitelist endpoint with v2 and without address of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and invalid apikey of Arka on ' + randomChainName + ' Network', async function () { - var test = this; + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [ - randomChainId, // without address - process.env.API_KEY, - ], - }, - header - ); + const response = await axios.post(url, requestData, header); - addContext(test, message.fail_whitelistv2_5); - console.log(message.fail_whitelistv2_5); + addContext(test, message.fail_whitelistv1_6); + console.log(message.fail_whitelistv1_6); } catch (e) { const error = e.response.data.error; if (error.includes(constant.invalid_apiKey)) { - addContext(test, message.vali_whitelistv2_5); - console.log(message.vali_whitelistv2_5); + addContext(test, message.vali_whitelistv1_6); + console.log(message.vali_whitelistv1_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); - assert.fail(message.fail_whitelistv2_5); + assert.fail(message.fail_whitelistv1_6); } } } ); it( - 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid apikey of Arka on ' + + 'REGRESSION: Validate the Whitelist endpoint with v1 and without apikey of Arka on ' + randomChainName + ' Network', async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv1_7); + console.log(message.fail_whitelistv1_7); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv1_7); + console.log(message.vali_whitelistv1_7); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_7); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v1 and without chainid of Arka on ' + + randomChainName + + ' Network', + async function () { const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, randomChainId, 'arka_public'], // invalid apikey - }, - header - ); + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv1_8); + console.log(message.fail_whitelistv1_8); + } catch (e) { + const error = e.response.data.error; + + if 
(error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv1_8); + console.log(message.vali_whitelistv1_8); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv1_8); + } + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'SMOKE: Validate the Whitelist endpoint with v2 of Arka 
on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // perform assertions + assert.include(response.data.message, constant.add_whitelist_3); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_1); + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid url of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist_v2_invalid}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid url + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_2); + console.log(message.fail_whitelistv2_2); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.not_found)) { + addContext(test, message.vali_whitelistv2_2); + console.log(message.vali_whitelistv2_2); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_2); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.invalid_sponsorAddress]], // invalid address + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_4); + console.log(message.fail_whitelistv2_4); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv2_4); + console.log(message.vali_whitelistv2_4); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_4); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and incorrect address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.incorrect_sponsorAddress]], // incorrect address + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_44); + console.log(message.fail_whitelistv2_44); + } catch (e) { + const error = e.response.data.error; + + if 
(error.includes(constant.add_whitelist_4)) { + addContext(test, message.vali_whitelistv2_44); + console.log(message.vali_whitelistv2_44); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_44); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and without address of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[]], // without address + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelistv2_5); + console.log(message.fail_whitelistv2_5); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.invalid_apiKey)) { + addContext(test, message.vali_whitelistv2_5); + console.log(message.vali_whitelistv2_5); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelistv2_5); + } + } + } + ); + + it( + 'REGRESSION: Validate the Whitelist endpoint with v2 and invalid apikey of Arka on ' + + randomChainName + + ' Network', + async function () { + const randomAddress = ethers.Wallet.createRandom(); + const addresses = [randomAddress.address]; + + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.INVALID_ARKA_API_KEY_PROD}&chainId=${randomChainId}`; // invalid apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv2_6); console.log(message.fail_whitelistv2_6); @@ -588,7 +1143,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv2_6); console.log(message.vali_whitelistv2_6); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv2_6); @@ -602,18 +1157,21 @@ describe('Validate the whitelist endpoint of the Arka', function () { randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_v2}?chainId=${randomChainId}`; // without apikey + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, randomChainId], // without apikey - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv2_7); console.log(message.fail_whitelistv2_7); @@ -624,7 +1182,7 @@ describe('Validate the whitelist endpoint of the Arka', function () { addContext(test, message.vali_whitelistv2_7); console.log(message.vali_whitelistv2_7); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv2_7); @@ -638,29 +1196,32 @@ describe('Validate the whitelist endpoint of the Arka', function 
() { randomChainName + ' Network', async function () { - var test = this; const randomAddress = ethers.Wallet.createRandom(); const addresses = [randomAddress.address]; + // define the url + const url = `${data.arka_whitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}`; // without chainid + + // define the payload + const requestData = { + params: [[addresses]], + }; + + var test = this; + // validate the whitelist endpoint try { - const response = await axios.post( - data.arka_whitelist_v2, - { - params: [addresses, process.env.API_KEY], // without chainid - }, - header - ); + const response = await axios.post(url, requestData, header); addContext(test, message.fail_whitelistv2_8); console.log(message.fail_whitelistv2_8); } catch (e) { const error = e.response.data.error; - if (error.includes(constant.invalid_apiKey)) { + if (error.includes(constant.add_whitelist_4)) { addContext(test, message.vali_whitelistv2_8); console.log(message.vali_whitelistv2_8); } else { - console.error(e); + console.error(e.response.data.error); const eString = e.toString(); addContext(test, eString); assert.fail(message.fail_whitelistv2_8); @@ -668,4 +1229,298 @@ describe('Validate the whitelist endpoint of the Arka', function () { } } ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + 
} + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the whitelist endpoint with v1 and userVp parameter as a false of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=false`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + // validate the message parameter in the response + assert.isNotEmpty( + response.data.message, + message.vali_whitelist_message + ); + + // validate the text of the message parameter in the response + assert.include( + response.data.message, + constant.add_whitelist_3, + message.vali_whitelist_messageText + ); + + // perform assertions + assert.equal(response.status, constant.successCode_1); + } catch (e) { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelist_1); + } + } + ); + + it( + 'PRECONDITION: Validate the remove whitelist endpoint of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url1 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=true`; + const url2 = `${data.arka_removeWhitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + const url3 = `${data.arka_removeWhitelist_v2}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the remove whitelist address endpoint with v1 and userVp parameter + try { + const response = await axios.post(url1, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_00); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v1 and without userVp parameter + try { + const response = await 
axios.post(url2, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv1_1); + console.log(message.vali_removeWhitelistv1_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv1_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + + // validate the remove whitelist address endpoint with v2 and without userVp parameter + try { + const response = await axios.post(url3, requestData, header); + + // perform assertions + addContext(test, message.vali_removeWhitelist_1); + console.log(message.vali_removeWhitelist_1); + + // wait for the response + helper.wait(data.mediumTimeout); + } catch (e) { + const error = e.response.data.error; + + if ( + error.includes(constant.remove_whitelist_1) || + error.includes(constant.remove_whitelist_4) + ) { + addContext(test, message.vali_removeWhitelistv2_1); + console.log(message.vali_removeWhitelistv2_1); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_removeWhitelistv2_0); + } + + // wait for the response + helper.wait(data.mediumTimeout); + } + } + ); + + it( + 'REGRESSION: Validate the whitelist endpoint with v1 and userVp parameter with invalid data of Arka on ' + + randomChainName + + ' Network', + async function () { + // define the url + const url = `${data.arka_whitelist}?apiKey=${process.env.ARKA_API_KEY_PROD}&chainId=${randomChainId}&useVp=qwerty`; + + // define the payload + const requestData = { + params: [[data.arka_sponsorAddress]], + }; + + var test = this; + // validate the whitelist endpoint + try { + const response = await axios.post(url, requestData, header); + + addContext(test, message.fail_whitelist_13); + console.log(message.fail_whitelist_13); + } catch (e) { + const error = e.response.data.error; + + if (error.includes(constant.deployVp_3)) { + addContext(test, message.vali_whitelist_13); + console.log(message.vali_whitelist_13); + } else { + console.error(e.response.data.error); + const eString = e.toString(); + addContext(test, eString); + assert.fail(message.fail_whitelist_13); + } + } + } + ); });