diff --git a/.circleci/package.sh b/.circleci/package.sh
index 22209995..6c1c1b6a 100755
--- a/.circleci/package.sh
+++ b/.circleci/package.sh
@@ -69,22 +69,26 @@ run_lint () {
local ESLINT_FILE=artifacts/eslint_report
mkdir -p artifacts
- if ! npm run lint -- -f unix -o "${ESLINT_FILE}.txt"; then
- echo "eslint plain failed"
- fi
- if ! npm run lint -- -f json -o "${ESLINT_FILE}.json"; then
- ## sonarqube report
- echo "eslint json failed"
- fi
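+	## record the lint exit code without failing the step so the report and summary are always produced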
+ ESLINT_RETURN_CODE=0
+ npm run lint -- -f json -o "${ESLINT_FILE}.json.tmp" || ESLINT_RETURN_CODE=$?
+ echo "eslint returned code=$ESLINT_RETURN_CODE"
+ jq '.' < "${ESLINT_FILE}.json.tmp" > "${ESLINT_FILE}.json"
sed -i 's|/home/circleci/project/|/root/project/|g' "${ESLINT_FILE}.json"
- if [ "$(find artifacts -name "eslint_report.json" | wc -l)" != "0" ]; then
- jq '.' < "${ESLINT_FILE}.json" > "${ESLINT_FILE}_pretty.json"
- else
- echo "ERROR: ${ESLINT_FILE}.json not found"
- exit 1
- fi
- echo 'eslint successful'
+
+ if [ ! -f "${ESLINT_FILE}.json" ]; then
+ echo "ERROR: ${ESLINT_FILE}.json not found"
+ exit 1
+ fi
+
+	MESSAGE_COUNT=$(jq '[.[] | .messages | length] | add' < "${ESLINT_FILE}.json.tmp")
+	ERROR_COUNT=$(jq '[.[] | .errorCount] | add' < "${ESLINT_FILE}.json.tmp")
+	WARNING_COUNT=$(jq '[.[] | .warningCount] | add' < "${ESLINT_FILE}.json.tmp")
+
+ echo 'eslint summary:'
+ echo " - message count: $MESSAGE_COUNT"
+ echo " - error count: $ERROR_COUNT"
+ echo " - warning count: $WARNING_COUNT"
}
########## MAIN BLOCK ##########
diff --git a/.eslintrc.json b/.eslintrc.json
deleted file mode 100644
index 8da00e4f..00000000
--- a/.eslintrc.json
+++ /dev/null
@@ -1,98 +0,0 @@
-{
- "$schema": "https://json.schemastore.org/eslintrc.json",
- "root": true,
- // "env": {
- // "browser": true,
- // "commonjs": true,
- // "node": true
- // },
- "extends": [
- "eslint:recommended",
- "plugin:@typescript-eslint/recommended-type-checked",
- "plugin:@typescript-eslint/strict-type-checked",
- "plugin:@typescript-eslint/stylistic-type-checked",
- "plugin:promise/recommended"
- ],
-
- "globals": {
- "Atomics": "readonly",
- "SharedArrayBuffer": "readonly"
- },
- "parser": "@typescript-eslint/parser",
- "parserOptions": {
- "project": "./tsconfig.json",
- "tsconfigRootDir": "."
- // "ecmaVersion": 2020,
- // "sourceType": "module",
- // "project": "./tsconfig.json"
- },
- "plugins": [
- "@stylistic",
- "@typescript-eslint",
- "promise"
- ],
- "ignorePatterns": [
- "dummy-ext/",
- "test_projects/",
- "esbuild.js"
- ],
- "rules": {
- /****** @typescript-eslint/recommended-type-checked *****/
- // "no-return-await": "off", // deprecated; must be disabled to avoid conflict with "@typescript-eslint/return-await"
- // "@typescript-eslint/return-await": "error",
- // "require-await": "error",
- // "@typescript-eslint/require-await": "error",
-
- "@stylistic/indent": [ "error", "tab" ],
- "@stylistic/comma-spacing": [ "warn", { "before": false, "after": true }],
- "@stylistic/no-extra-parens": "warn",
- "@typescript-eslint/no-restricted-types": [ "error", {
- "types": {
- "Object": "Use {} instead.",
- "String": "Use 'string' instead.",
- "Number": "Use 'number' instead.",
- "Boolean": "Use 'boolean' instead."
- }
- }],
- "@typescript-eslint/naming-convention": [ "error", {
- "selector": "interface",
- "format": [ "PascalCase" ],
- "custom": { "regex": "^I[A-Z]", "match": true }
- }],
- "@typescript-eslint/no-confusing-non-null-assertion": "warn",
- "@typescript-eslint/no-floating-promises": [ "error", { "checkThenables": true } ],
- "@typescript-eslint/no-misused-promises": "error",
- "@typescript-eslint/no-non-null-assertion": 0, // strict-type-checked="error"
- "@typescript-eslint/no-unnecessary-condition": 0, // strict-type-checked="error"
- "no-unused-vars": "off", // must be disabled to avoid conflict with "@typescript-eslint/no-unused-vars"
- "@typescript-eslint/no-unused-vars": [ // strict-type-checked="error"
- "warn", {
- "argsIgnorePattern": "^_",
- "vars": "all",
- "args": "none",
- "ignoreRestSiblings": false
- }
- ],
- "@typescript-eslint/prefer-readonly": "warn",
- "@typescript-eslint/restrict-plus-operands": "off",
- "@typescript-eslint/switch-exhaustiveness-check": "warn",
-
- "promise/catch-or-return": "warn",
- "promise/no-callback-in-promise": "off",
- "promise/always-return": [ "warn", { "ignoreLastCallback": true } ],
-
- // "no-await-in-loop": "warn",
- "no-console": "warn",
- "no-empty": "warn",
- "no-mixed-spaces-and-tabs": ["error", "smart-tabs"],
- "no-trailing-spaces": [ "error", { "skipBlankLines": false }],
- "prefer-promise-reject-errors": "error",
- "quotes": ["warn", "single"],
- "semi": [ "error", "never"],
- "space-before-blocks": [ "error", "always"],
- "space-before-function-paren": ["warn", "always"],
- "space-in-parens": [ "warn", "never"],
- "spaced-comment": [ "error", "always", { "markers": [ "/" ] }]
- }
-
-}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 29402d27..44d5c493 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,26 +1,10 @@
{
- // "eslint.lintTask.enable": true,
- "eslint.codeActionsOnSave.mode": "problems",
- // "extension-test-runner.extractSettings": {
- // "suite": [ "suite" ],
- // "test": [ "test" ],
- // // "extractWith": "syntax",
- // "extractWith": "evaluation",
- // },
- "git.branchProtection": [
- "main"
+ "circleci.filters.branchFilter": "allBranches",
+ "circleci.persistedProjectSelection": [
+ "gh/kenherring/ablunit-test-runner"
],
- "extension-test-runner.extractSettings": {
- "suite": [
- "describe",
- "suite"
- ],
- "test": [
- "it",
- "test"
- ],
- "extractWith": "syntax"
- },
+ "eslint.lintTask.enable": true,
+ "eslint.codeActionsOnSave.mode": "problems",
"files.associations": {
"ablunit-test-profile*.json": "jsonc"
},
@@ -37,6 +21,9 @@
"out": true,
"node_modules": true
},
+ "git.branchProtection": [
+ "main"
+ ],
"json.schemas": [
{
"fileMatch": [
@@ -66,8 +53,4 @@
"task.autoDetect": "on",
"typescript.preferences.quoteStyle": "single",
"typescript.validate.enable": true,
- "circleci.filters.branchFilter": "allBranches",
- "circleci.persistedProjectSelection": [
- "gh/kenherring/ablunit-test-runner"
- ],
}
diff --git a/TODO.md b/TODO.md
deleted file mode 100644
index b741cf99..00000000
--- a/TODO.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# TODO
-
-* move `./coverage` to `./artifacts/coverage`
diff --git a/docker/build.xml b/docker/build.xml
index a309d55b..67879148 100644
--- a/docker/build.xml
+++ b/docker/build.xml
@@ -36,4 +36,12 @@
compile successful!
+
+ extracting ${env.DLC}/tty/ablunit.pl
+
+
+
+ extract complete
+
+
diff --git a/docker/run_tests.sh b/docker/run_tests.sh
index 39c3894b..b7a938da 100755
--- a/docker/run_tests.sh
+++ b/docker/run_tests.sh
@@ -11,7 +11,7 @@ options:
alternative: set the ABLUNIT_TEST_RUNNER_VSCODE_VERSION environment variable
-b drop to bash shell inside container on failure
-B same as -b, but only on error
- -C | -d delete volume 'test-runner-cache' before running tests
+ -C | -d delete volumes 'vscode-cli-cache' and 'test-runner-cache' before running tests
-i run install and run test
-m copy modified files and staged files
-n run tests without coverage
@@ -120,10 +120,10 @@ initialize () {
fi
## create volume for .vscode-test directory to persist vscode application downloads
- if ! docker volume ls | grep -q test-runner-cache; then
- echo "creating test-runner-cache volume"
- docker volume create --name test-runner-cache
- fi
+ # if ! docker volume ls | grep -q test-runner-cache; then
+ # echo "creating test-runner-cache volume"
+ # docker volume create --name test-runner-cache
+ # fi
if ! docker volume ls | grep -q vscode-cli-cache; then
echo "creating vscode-cli-cache"
docker volume create --name vscode-cli-cache
@@ -149,7 +149,7 @@ initialize () {
}
run_tests_in_docker () {
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] pwd=$(pwd)"
+ echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] pwd=$(pwd) ABLUNIT_TEST_RUNNER_OE_VERSION=$ABLUNIT_TEST_RUNNER_OE_VERSION"
local ABLUNIT_TEST_RUNNER_OE_VERSION
for ABLUNIT_TEST_RUNNER_OE_VERSION in "${OE_VERSIONS[@]}"; do
@@ -178,7 +178,7 @@ run_tests_in_docker () {
[ -n "${ABLUNIT_TEST_RUNNER_PROJECT_NAME:-}" ] && ARGS+=(-e ABLUNIT_TEST_RUNNER_PROJECT_NAME)
ARGS+=(
-v "${PWD}":/home/circleci/ablunit-test-runner:ro
- -v vscode-cli-cache:/home/circleci/project/.vscode-test
+ -v "vscode-cli-cache-$ABLUNIT_TEST_RUNNER_OE_VERSION":/home/circleci/project/.vscode-test
kherring/ablunit-test-runner:"${ABLUNIT_TEST_RUNNER_OE_VERSION}"
bash -c "/home/circleci/ablunit-test-runner/docker/$SCRIPT.sh $OPTS;"
)
diff --git a/eslint.config.mjs b/eslint.config.mjs
index f8896fd4..a76b8ee4 100644
--- a/eslint.config.mjs
+++ b/eslint.config.mjs
@@ -86,21 +86,29 @@ export default [{
'@typescript-eslint/no-unnecessary-condition': 0,
'no-unused-vars': 'off',
- '@typescript-eslint/no-unused-vars': ['warn', {
- argsIgnorePattern: '^_',
- vars: 'all',
- args: 'none',
- ignoreRestSiblings: false,
- }],
+ '@typescript-eslint/no-unused-vars': [
+ 'error',
+ {
+ 'args': 'all',
+ 'argsIgnorePattern': '^_',
+ 'caughtErrors': 'all',
+ 'caughtErrorsIgnorePattern': '^_',
+ 'destructuredArrayIgnorePattern': '^_',
+ 'varsIgnorePattern': '^_',
+ 'ignoreRestSiblings': true,
+ }
+ ],
'@typescript-eslint/prefer-readonly': 'warn',
'@typescript-eslint/restrict-plus-operands': 'off',
- '@typescript-eslint/switch-exhaustiveness-check': 'warn',
+ '@typescript-eslint/switch-exhaustiveness-check': ['warn', {
+ considerDefaultExhaustiveForUnions: true,
+ }],
'promise/catch-or-return': 'warn',
'promise/no-callback-in-promise': 'off',
'promise/always-return': ['warn', {
- ignoreLastCallback: true,
+ ignoreLastCallback: true
}],
'no-console': 'warn',
diff --git a/openedge-project.json b/openedge-project.json
index d67ef36b..51cc3eed 100644
--- a/openedge-project.json
+++ b/openedge-project.json
@@ -1,7 +1,7 @@
{
"name": "ablunit-test-runner",
"buildPath": [
- { "type": "source", "path": "resources/VSCodeTestRunner", "includes": "**/*.p,**/*.cls"},
+ { "type": "source", "path": "resources/VSCodeTestRunner", "includes": "**/*.p,**/*.cls", "build": "resources/VSCodeTestRunner"},
{ "type": "propath", "path": "${DLC}/tty/ablunit.pl", "includes": "**/*" }
],
"buildDirectory": "target"
diff --git a/resources/VSCodeTestRunner/ABLUnitCore.p b/resources/VSCodeTestRunner/ABLUnitCore.p
index 2df3b6a4..0d004643 100644
--- a/resources/VSCodeTestRunner/ABLUnitCore.p
+++ b/resources/VSCodeTestRunner/ABLUnitCore.p
@@ -1,11 +1,17 @@
// This file replaces the standard ABLUnitCore.p when the basedir is
// included as part of the propath ahead of ablunit.pl.
+using VSCode.ABLUnit.Runner.ABLRunner.
+
block-level on error undo, throw.
create widget-pool.
define variable quitOnEnd as logical init false no-undo.
+define variable VERBOSE as logical no-undo.
+VERBOSE = (os-getenv('VERBOSE') = 'true' or os-getenv('VERBOSE') = '1').
+if VERBOSE then
+ run printPropath.
run main.
if quitOnEnd then
quit.
@@ -13,6 +19,13 @@ else
return.
////////// FUNCS AND PROCS //////////
+procedure printPropath :
+ message "PROPATH:~n" + replace(PROPATH, ',', '~n - ').
+ define variable cnt as integer no-undo.
+ do cnt = 1 to num-entries(propath, ','):
+ message ' - '+ entry(cnt, propath).
+ end.
+end procedure.
procedure createDatabaseAliases :
define variable aliasesSessionParam as character no-undo.
@@ -34,6 +47,7 @@ procedure createDatabaseAliases :
do dbCount = 1 to num-entries(aliasesSessionParam,';'):
assign aliasList = entry(dbCount, aliasesSessionParam,';').
assign databaseName = entry(1,aliasList).
+ if VERBOSE then message "databaseName=" + databaseName.
do aliasCount = 2 to num-entries(aliaslist,','):
assign aliasName = entry(aliasCount, aliasList).
@@ -64,12 +78,12 @@ function writeErrorToLog returns logical (outputLocation as character, msg as ch
else
log-manager:logfile-name = session:temp-dir + "ablunit.log".
end.
- log-manager:write-message (msg).
+ log-manager:write-message(msg).
return true.
end function.
procedure main :
- define variable ablRunner as class OpenEdge.ABLUnit.Runner.ABLRunner no-undo.
+ define variable ablRunner as class ABLRunner no-undo.
define variable testConfig as class OpenEdge.ABLUnit.Runner.TestConfig no-undo.
define variable updateFile as character no-undo.
@@ -80,7 +94,7 @@ procedure main :
testConfig = readTestConfig(getParameter(trim(trim(session:parameter,'"'),"'"), 'CFG')).
quitOnEnd = (testConfig = ?) or testConfig:quitOnEnd.
- ablRunner = new OpenEdge.ABLUnit.Runner.ABLRunner(testConfig, updateFile).
+ ablRunner = new ABLRunner(testConfig, updateFile).
ablRunner:RunTests().
// the `-catchStop 1` startup parameter is default in 11.7+
@@ -109,7 +123,10 @@ procedure main :
writeErrorToLog(testConfig:outputLocation, s:CallStack).
end.
if testConfig:ShowErrorMessage then
+ do:
message e:GetMessage(1) view-as alert-box error.
+ message e:CallStack.
+ end.
if testConfig:ThrowError then
undo, throw e.
end.
diff --git a/resources/VSCodeTestRunner/VSCode/ABLUnit/Runner/ABLRunner.cls b/resources/VSCodeTestRunner/VSCode/ABLUnit/Runner/ABLRunner.cls
new file mode 100644
index 00000000..cd69dd5f
--- /dev/null
+++ b/resources/VSCodeTestRunner/VSCode/ABLUnit/Runner/ABLRunner.cls
@@ -0,0 +1,112 @@
+using OpenEdge.ABLUnit.Model.TestEntity.
+using OpenEdge.ABLUnit.Model.TestRootModel.
+using OpenEdge.ABLUnit.Results.TestTypeResult.
+using OpenEdge.ABLUnit.Runner.TestConfig.
+using OpenEdge.Core.Collections.Array.
+
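+// Extends the OpenEdge ABLRunner so profiler data can be split per test.
+// populateTestModel records each test entity and its parent; updateFile watches
+// for TEST_END / TEST_EXCEPTION updates and calls restartProfiler to write the
+// data collected so far to a per-entity file.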
+class VSCode.ABLUnit.Runner.ABLRunner inherits OpenEdge.ABLUnit.Runner.ABLRunner :
+
+
+ define temp-table ttEntityMap no-undo
+ field entityId as integer
+ field entityName as character
+ field entityParent as character
+ index idx-1 is primary unique entityId.
+
+ define variable VERBOSE as logical no-undo.
+
+ constructor public ABLRunner ():
+ super().
+ VERBOSE = (os-getenv('VERBOSE') = 'true' or os-getenv('VERBOSE') = '1').
+ end constructor.
+
+ constructor public ABLRunner (inputConfig as TestConfig, updateFile as character):
+ super(inputConfig, updateFile).
+ VERBOSE = (os-getenv('VERBOSE') = 'true' or os-getenv('VERBOSE') = '1').
+ end constructor.
+
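+	// cache the entity id -> (name, parent) mapping for later lookup by getEntityName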
+ method public void createEntityMapRecord(entityId as integer, entityName as character, parentName as character):
+ if VERBOSE then message "createEntityMapRecord".
+ if can-find(ttEntityMap where ttEntityMap.entityId = entityId) then
+ return.
+ create ttEntityMap.
+ ttEntityMap.entityId = entityId.
+ ttEntityMap.entityName = entityName.
+ ttEntityMap.entityParent = parentName.
+ release ttEntityMap.
+ end method.
+
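+	// recursively register every child test entity under its parent's name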
+ method public void mapEntityChildren(parentName as character, testEntities as Array):
+ if VERBOSE then message "mapEntityChildren".
+ define variable childTestEntity as TestEntity no-undo.
+ define variable i as integer no-undo.
+ do i = 1 to testEntities:Size:
+ childTestEntity = cast(testEntities:GetValue(i), TestEntity).
+ createEntityMapRecord(childTestEntity:id, childTestEntity:name, parentName).
+ mapEntityChildren(childTestEntity:name, childTestEntity:getTestEntities()).
+ end.
+ end method.
+
+ METHOD PUBLIC override TestEntity populateTestModel(INPUT testCase AS CHARACTER, INPUT testCount AS INTEGER):
+ if VERBOSE then message "populateTestModel".
+ define variable testModel as TestEntity no-undo.
+ testModel = super:populateTestModel(testCase, testCount).
+ mapEntityChildren(testModel:name, testModel:getTestEntities()).
+ return testModel.
+ END METHOD.
+
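+	// after the standard update is written, restart the profiler when a test
+	// ends or throws so its data lands in a dedicated per-test file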
+ method public override void updateFile(updateFile as character, content as longchar, override as logical) :
+ if VERBOSE then message "updateFile".
+ define variable entityId as integer no-undo.
+ super:updateFile(updateFile, content, override).
+
+ if content begins 'TEST_END ' or
+ content begins 'TEST_EXCEPTION ' then
+ do:
+ entityId = integer(entry(2, content, ' ')).
+ if (entityId > 0) then
+ restartProfiler(getEntityName(entityId), entityId).
+ if error-status:error then
+ message "ERROR: " + error-status:get-message(1) + ' (' + string(error-status:get-number(1)) + ')'.
+ end.
+
+ end method.
+
+ method public character getEntityName (entityId as integer) :
+ if VERBOSE then message "getEntityName".
+ define variable entityName as character init '' no-undo.
+ define variable parentName as character init '' no-undo.
+ find ttEntityMap where
+ ttEntityMap.entityId = entityId
+ no-error.
+ if available ttEntityMap then
+ do:
+ entityName = ttEntityMap.entityName.
+ parentName = ttEntityMap.entityParent.
+ end.
+ return parentName + ' ' + entityName.
+ end method.
+
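+	// tag the profiler description with the test info, write the data collected
+	// so far to <basename>_<entityId>.<extension>, then restore the original
+	// file name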
+ method public void restartProfiler(testinfo as character, entityId as integer) :
+ if VERBOSE then message "restartProfiler".
+ if entityId = 0 then
+ return. // TEST_ROOT
+
+ define variable extension as character no-undo.
+ define variable profile-file-name as character no-undo.
+ define variable basename as character no-undo.
+
+ if num-entries(profiler:description, '|') = 1 then
+ profiler:description = profiler:description + '|'.
+ entry(2, profiler:description, '|') = testinfo.
+
+ extension = entry(num-entries(profiler:file-name, '.'), profiler:file-name, '.').
+ basename = substring(profiler:file-name, 1, length(profiler:file-name) - length(extension) - 1).
+ profile-file-name = profiler:file-name.
+ profiler:file-name = basename + '_' + string(entityId) + '.' + extension.
+
+ profiler:write-data().
+ profiler:file-name = profile-file-name.
+ end method.
+
+end class.
diff --git a/scripts/common.sh b/scripts/common.sh
index 82d99b1a..e521c7f1 100755
--- a/scripts/common.sh
+++ b/scripts/common.sh
@@ -30,3 +30,11 @@ validate_version_updated() {
exit 1
fi
}
+
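+## log helpers: prefix output with a timestamp, the script name, and the calling function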
+log_it () {
+	echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[1]:-main}]" "$@"
+}
+
+log_error () {
+	echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[1]:-main}] ERROR:" "$@" >&2
+}
diff --git a/scripts/npm_clean.sh b/scripts/npm_clean.sh
index 16ad9e86..f7b37c4d 100755
--- a/scripts/npm_clean.sh
+++ b/scripts/npm_clean.sh
@@ -69,6 +69,8 @@ initialize () {
"results.prof"
"results.xml"
)
+
+ find resources -type f -name "*.r" -delete
}
delete_directories () {
diff --git a/scripts/sonar_test_results_merge.sh b/scripts/sonar_test_results_merge.sh
index 971f562b..ca986e43 100755
--- a/scripts/sonar_test_results_merge.sh
+++ b/scripts/sonar_test_results_merge.sh
@@ -1,11 +1,13 @@
#!/bin/bash
set -eou pipefail
+. scripts/common.sh
+
initialize () {
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}]"
+ log_it
rm -f artifacts/mocha_results_sonar/merged*.xml
if ! find artifacts/mocha_results_sonar -type f -name "*.xml"; then
- echo "ERROR: no *.xml files found in artifacts/mocha_results_sonar"
+ log_error "no *.xml files found in artifacts/mocha_results_sonar"
exit 1
else
echo "Directory is empty"
@@ -31,27 +33,26 @@ convert_and_merge_xml () {
${VERBOSE:-false} && cat artifacts/mocha_results_sonar_merged.xml
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] merged test results for sonar consumption. output: artifacts/mocha_results_sonar_merged.xml"
+ log_it 'merged test results for sonar consumption. output: artifacts/mocha_results_sonar_merged.xml'
## Merge to json
xq -s '.' artifacts/mocha_results_xunit/*.xml > artifacts/mocha_results_xunit_merged.json
}
show_summary () {
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}]"
+ log_it
TEST_COUNT="$(jq '[.. | objects | .testcase//empty | .. | objects] | length' < artifacts/mocha_results_xunit_merged.json)"
- echo "[$(date +%Y-%m-%d:%H:%M:%S) TEST_COUNT=$TEST_COUNT"
+ log_it "TEST_COUNT=$TEST_COUNT"
SKIPPED="$(jq '[.. | objects | .testcase//empty | .. | objects | select(has("skipped")) ] | length' < artifacts/mocha_results_xunit_merged.json)"
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $SKIPPED/$TEST_COUNT tests skipped"
+ log_it "$SKIPPED/$TEST_COUNT tests skipped"
FAILURES="$(jq '[.. | objects | .testcase//empty | .. | objects | select(has("failure")) ] | length' < artifacts/mocha_results_xunit_merged.json)"
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $FAILURES/$TEST_COUNT tests failed"
jq '[.. | objects | .testcase//empty | .. | objects | select(has("failure")) ]' < artifacts/mocha_results_xunit_merged.json > artifacts/mocha_failures.json
if [ "$FAILURES" -eq 0 ]; then
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $FAILURES/$TEST_COUNT tests failed"
+ log_it "$FAILURES/$TEST_COUNT tests failed"
else
- echo "[$(date +%Y-%m-%d:%H:%M:%S) ERROR! $FAILURES/$TEST_COUNT tests failed"
+ log_it "ERROR! $FAILURES/$TEST_COUNT tests failed"
jq '.' artifacts/mocha_failures.json
- echo "[$(date +%Y-%m-%d:%H:%M:%S) exit with error code 1 due to $FAILURES failed tests"
+ log_it "exit with error code 1 due to $FAILURES failed tests"
exit 1
fi
}
diff --git a/scripts/validate.sh b/scripts/validate.sh
index ff892792..fc97da78 100755
--- a/scripts/validate.sh
+++ b/scripts/validate.sh
@@ -4,30 +4,28 @@ set -eou pipefail
. scripts/common.sh
validate_results_count() {
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] VERBOSE='${VERBOSE:-}'"
+ log_it "VERBOSE='${VERBOSE:-}'"
EXPECTED_VSIX_COUNT=${EXPECTED_VSIX_COUNT:-1}
VERBOSE=${VERBOSE:-false}
TEST_COUNT=$(find test/suites -name "*.test.ts" | wc -l)
if [ ! -d artifacts ]; then
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] ERROR: no 'artifacts' directory found"
+		log_error "no 'artifacts' directory found"
exit 1
fi
- ARTIFACT_DIR="artifacts/${ABLUNIT_TEST_RUNNER_VSCODE_VERSION}-${ABLUNIT_TEST_RUNNER_OE_VERSION}"
-
- RESULTS_COUNT=$(find "$ARTIFACT_DIR" -name "mocha_results_junit*.xml" | sort -u | wc -l)
+ RESULTS_COUNT=$(find "artifacts/mocha_results_xunit" -name "*.xml" | sort -u | wc -l)
if [ "$RESULTS_COUNT" != "$TEST_COUNT" ]; then
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] ERROR: results count != test count ($RESULTS_COUNT != $TEST_COUNT)"
+		log_error "results count != test count ($RESULTS_COUNT != $TEST_COUNT)"
fi
LCOV_COUNT=$(find . -name 'lcov.info' | wc -l)
- if [ "$LCOV_COUNT" != "$TEST_COUNT" ]; then
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] ERROR: LCOV_COUNT != 1 ($LCOV_COUNT != 1)"
+ if [ "$LCOV_COUNT" != "1" ]; then
+		log_error "LCOV_COUNT != 1 ($LCOV_COUNT != 1)"
exit 1
fi
if ${VERBOSE:-true}; then
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] TEST_COUNT=${TEST_COUNT:-}, RESULTS_COUNT=${RESULTS_COUNT:-}, LCOV_COUNT=${LCOV_COUNT:-}"
+ log_it "TEST_COUNT=${TEST_COUNT:-}, RESULTS_COUNT=${RESULTS_COUNT:-}, LCOV_COUNT=${LCOV_COUNT:-}"
find test/suites -name "*.test.ts" | sort
-		find "$ARTIFACT_DIR" -name "mocha_results_*.xml" | sort
+		find artifacts/mocha_results_xunit -name "*.xml" | sort
find . -name 'lcov.info' | sort
@@ -35,7 +33,7 @@ validate_results_count() {
if [ -n "$ABLUNIT_TEST_RUNNER_PROJECT_NAME" ]; then
if [ "$RESULTS_COUNT" != "$TEST_COUNT" ] || [ "$LCOV_COUNT" != "$EXPECTED_VSIX_COUNT" ]; then
- echo "[$(date +%Y-%m-%d:%H:%M:%S) $0 ${FUNCNAME[0]}] ERROR: results count != test count ($RESULTS_COUNT != $TEST_COUNT) or LCOV_COUNT != 1 ($LCOV_COUNT != 1)"
+			log_error "results count != test count ($RESULTS_COUNT != $TEST_COUNT) or LCOV_COUNT != 1 ($LCOV_COUNT != 1)"
return 1
fi
fi
@@ -44,4 +42,4 @@ validate_results_count() {
########## MAIN BLOCK ##########
validate_version_updated
validate_results_count
-echo "[$(date +%Y-%m-%d:%H:%M:%S) $0] completed successfully!"
+log_it 'completed successfully!'
diff --git a/src/ABLDebugLines.ts b/src/ABLDebugLines.ts
index ec3376df..a5b748dc 100644
--- a/src/ABLDebugLines.ts
+++ b/src/ABLDebugLines.ts
@@ -1,4 +1,3 @@
-import { Uri } from 'vscode'
import { PropathParser } from './ABLPropath'
import { log } from './ChannelLogger'
import { SourceMap } from './parse/SourceMapParser'
@@ -8,7 +7,7 @@ import { getSourceMapFromXref } from './parse/SourceMapXrefParser'
export class ABLDebugLines {
private readonly maps = new Map()
- private readonly processingMethodMap = new Map()
+ private readonly processingMethodMap = new Map()
propath: PropathParser
constructor (propath?: PropathParser) {
@@ -23,18 +22,6 @@ export class ABLDebugLines {
return this.maps.size
}
- getSourceLines (debugSource: string | Uri) {
- if (debugSource instanceof Uri) {
- debugSource = debugSource.fsPath
- }
- log.info('debugSource=' + debugSource)
- const map = this.maps.get(debugSource)
- if (!map) {
- throw new Error('no source map found (' + debugSource + ')')
- }
- return map.items
- }
-
getProcessingMethod (debugSource: string) {
return this.processingMethodMap.get(debugSource)
}
@@ -43,10 +30,26 @@ export class ABLDebugLines {
// if (debugSource.startsWith('OpenEdge.') || debugSource.includes('ABLUnitCore')) {
// return undefined
// }
+ const map = await this.getSourceMap(debugSource)
+ if (!map) {
+ return
+ }
+ const ret = map.items.find((line) => line.debugLine === debugLine)
+ return ret
+ }
+ private async getSourceMap (debugSource: string) {
if (!debugSource.endsWith('.p') && !debugSource.endsWith('.cls')) {
debugSource = debugSource.replace(/\./g, '/') + '.cls'
}
+ let map = this.maps.get(debugSource)
+ if (map) {
+ // return previously parsed map
+ return map
+ }
+ if (this.processingMethodMap.get(debugSource) === 'none') {
+ return undefined
+ }
const debugSourceObj = await this.propath.search(debugSource)
if (!debugSourceObj) {
@@ -54,24 +57,27 @@ export class ABLDebugLines {
return undefined
}
- let map = this.maps.get(debugSource)
- if (!map) {
- try {
- map = await getSourceMapFromRCode(this.propath, await this.propath.getRCodeUri(debugSource))
- this.processingMethodMap.set(debugSource, 'rcode')
- } catch (e) {
- log.warn('cannot parse source map from rcode, falling back to source parser (debugSource=' + debugSource + ', e=' + e + ')')
- map = await getSourceMapFromXref(this.propath, debugSource)
- this.processingMethodMap.set(debugSource, 'parse')
- }
+ // first, attempt to parse source map from rcode
+ try {
+ map = await getSourceMapFromRCode(this.propath, debugSourceObj.rcodeUri)
+ this.processingMethodMap.set(debugSource, 'rcode')
+ this.maps.set(debugSource, map)
+ return map
+ } catch (e) {
+ log.warn('failed to parse source map from rcode, falling back to source parser\n\tdebugSource=' + debugSource + '\n\te=' + e)
+ }
- if (!map) {
- throw new Error('failed to parse source map (' + debugSource + ')')
- } else {
- this.maps.set(debugSource, map)
- }
+ // if that fails, attempt to parse source map from xref
+ try {
+ map = await getSourceMapFromXref(this.propath, debugSource)
+ this.processingMethodMap.set(debugSource, 'parse')
+ this.maps.set(debugSource, map)
+ return map
+ } catch(e) {
+ log.warn('failed to parse source map from xref\n\tdebugSource=' + debugSource + '\n\te=' + e)
}
- const ret = map.items.find((line) => line.debugLine === debugLine)
- return ret
+
+ this.processingMethodMap.set(debugSource, 'none')
+ return map
}
}
diff --git a/src/ABLPromsgs.ts b/src/ABLPromsgs.ts
index 766c39ca..7235811b 100644
--- a/src/ABLPromsgs.ts
+++ b/src/ABLPromsgs.ts
@@ -152,7 +152,7 @@ export function getPromsgText (text: string) {
}
})
return stackString
- } catch (_e) {
+ } catch (_e: unknown) {
return text
}
}
diff --git a/src/ABLPropath.ts b/src/ABLPropath.ts
index 33dd074f..ee3fe185 100644
--- a/src/ABLPropath.ts
+++ b/src/ABLPropath.ts
@@ -3,7 +3,7 @@ import { IProjectJson } from './parse/OpenedgeProjectParser'
import { log } from './ChannelLogger'
import * as FileUtils from './FileUtils'
-export interface IPropathEntry {
+interface IPropathEntry {
uri: Uri
path: string
relativePath?: string
@@ -32,7 +32,6 @@ export class PropathParser {
filemap: Map
files: IABLFile[] = []
workspaceFolder: WorkspaceFolder
- buildmap: Map
propath: IPropath = {
entry: [] as IPropathEntry[]
@@ -45,7 +44,6 @@ export class PropathParser {
this.workspaceFolder = workspace.workspaceFolders![0]
}
this.filemap = new Map()
- this.buildmap = new Map()
let uri
if (this.workspaceFolder) {
@@ -125,34 +123,12 @@ export class PropathParser {
return this.propath
}
- getBuildDir (filepath: string) {
- return this.buildmap.get(filepath)
- }
-
- async getRCodeUri (filepath: string) {
- let bd = this.buildmap.get(filepath)
-
- if (!bd) {
- const found = await this.search(filepath)
- if (found) {
- bd = this.buildmap.get(filepath)
- }
- }
-
- if (!bd) {
- throw new Error('cannot find build dir for ' + filepath)
- }
-
- const rpath = Uri.joinPath(Uri.file(bd), filepath.replace(/\.(p|cls)$/, '.r'))
- return rpath
- }
-
private searchUri (uri: Uri) {
for (const e of this.propath.entry) {
if(uri.fsPath.replace(/\\/g, '/').startsWith(e.uri.fsPath.replace(/\\/g, '/') + '/')) {
const propathRelativeFile = uri.fsPath.replace(e.uri.fsPath, '').substring(1)
const relativeFile = workspace.asRelativePath(uri, false)
- const rcodeUri = Uri.joinPath(e.buildDirUri, relativeFile.replace(/\.(p|cls)$/, '.r'))
+ const rcodeUri = Uri.joinPath(e.buildDirUri, propathRelativeFile.replace(/\.(p|cls)$/, '.r'))
const xrefUri = Uri.joinPath(e.xrefDirUri, propathRelativeFile + '.xref')
const fileObj: IABLFile = {
@@ -166,7 +142,6 @@ export class PropathParser {
}
this.files.push(fileObj)
this.filemap.set(relativeFile, fileObj)
- this.buildmap.set(relativeFile, e.buildDirUri.fsPath)
return fileObj
}
}
@@ -211,7 +186,6 @@ export class PropathParser {
}
this.files.push(fileObj)
this.filemap.set(relativeFile, fileObj)
- this.buildmap.set(relativeFile, e.buildDirUri.fsPath)
return fileObj
}
}
diff --git a/src/ABLResults.ts b/src/ABLResults.ts
index ffa38df3..85eee0e0 100644
--- a/src/ABLResults.ts
+++ b/src/ABLResults.ts
@@ -1,23 +1,26 @@
-import { FileType, MarkdownString, TestItem, TestItemCollection, TestMessage, TestRun, Uri, workspace, WorkspaceFolder, Position,
+import { FileType, MarkdownString, TestItem, TestItemCollection, TestMessage, TestRun, Uri, workspace, WorkspaceFolder,
FileCoverage, FileCoverageDetail,
Disposable, CancellationToken, CancellationError,
- StatementCoverage,
- TestRunRequest,
- TestRunProfileKind} from 'vscode'
+ Position, Range,
+ DeclarationCoverage, StatementCoverage,
+ TestRunRequest, TestRunProfileKind} from 'vscode'
import { ABLUnitConfig } from './ABLUnitConfigWriter'
import { ABLResultsParser, ITestCaseFailure, ITestCase, ITestSuite } from './parse/ResultsParser'
import { ABLTestSuite, ABLTestData, ABLTestCase } from './testTree'
import { parseCallstack } from './parse/CallStackParser'
-import { ABLProfile, ABLProfileJson, IModule } from './parse/ProfileParser'
+import { ABLProfile, ABLProfileJson, checkSkipList, getModuleRange, IModule } from './parse/ProfileParser'
import { ABLDebugLines } from './ABLDebugLines'
import { ABLPromsgs, getPromsgText } from './ABLPromsgs'
import { PropathParser } from './ABLPropath'
import { log } from './ChannelLogger'
-import { ABLUnitRuntimeError, RunStatus, TimeoutError, ablunitRun } from './ABLUnitRun'
+import { RunStatus, ablunitRun } from './ABLUnitRun'
import { getDLC, IDlc } from './parse/OpenedgeProjectParser'
import { Duration } from './ABLUnitCommon'
import { ITestObj } from 'parse/config/CoreOptions'
import * as FileUtils from './FileUtils'
+import { basename, dirname } from 'path'
+import { globSync } from 'glob'
+import { ABLUnitRuntimeError, TimeoutError } from 'Errors'
export class ABLResults implements Disposable {
workspaceFolder: WorkspaceFolder
@@ -34,13 +37,15 @@ export class ABLResults implements Disposable {
propath: PropathParser
debugLines: ABLDebugLines
promsgs: ABLPromsgs
- profileJson?: ABLProfileJson
+ profileJson: ABLProfileJson[] = []
coverageJson: [] = []
dlc: IDlc
thrownError: Error | undefined
- public coverage: Map = new Map()
- public filecoverage: FileCoverage[] = []
+ public fileCoverage: Map = new Map()
+ public fileCoverageDetail: FileCoverageDetail[] = []
+ public declarationCoverage: Map = new Map()
+ public statementCoverage: Map = new Map()
constructor (workspaceFolder: WorkspaceFolder,
private readonly storageUri: Uri,
@@ -78,9 +83,7 @@ export class ABLResults implements Disposable {
dispose () {
this.setStatus(RunStatus.Cancelled, 'disposing ABLResults object')
- delete this.profileJson
delete this.ablResults
- delete this.profileJson
}
setStatus (status: RunStatus, statusNote?: string) {
@@ -101,13 +104,13 @@ export class ABLResults implements Disposable {
log.info('[start] workspaceFolder=' + this.workspaceFolder.uri.fsPath)
// eslint-disable-next-line @typescript-eslint/no-invalid-void-type
- const prom: (Thenable | Promise | Promise | undefined)[] = []
- prom[0] = this.cfg.createProfileOptions(this.cfg.ablunitConfig.profOptsUri, this.cfg.ablunitConfig.profiler)
- prom[1] = this.cfg.createProgressIni(this.propath.toString(), this.dlc)
- prom[2] = this.cfg.createAblunitJson(this.cfg.ablunitConfig.config_uri, this.cfg.ablunitConfig.options, this.testQueue)
- prom[3] = this.cfg.createDbConnPf(this.cfg.ablunitConfig.dbConnPfUri, this.cfg.ablunitConfig.dbConns)
+ const proms: (Thenable | Promise | Promise | undefined)[] = []
+ this.cfg.createProfileOptions(this.cfg.ablunitConfig.profOptsUri, this.cfg.ablunitConfig.profiler)
+ this.cfg.createDbConnPf(this.cfg.ablunitConfig.dbConnPfUri, this.cfg.ablunitConfig.dbConns)
+ proms.push(this.cfg.createProgressIni(this.propath.toString(), this.dlc))
+ proms.push(this.cfg.createAblunitJson(this.cfg.ablunitConfig.config_uri, this.cfg.ablunitConfig.options, this.testQueue))
- return Promise.all(prom).then(() => {
+ return Promise.all(proms).then(() => {
log.info('done creating config files for run')
return
}, (e: unknown) => {
@@ -237,13 +240,16 @@ export class ABLResults implements Disposable {
if (this.request.profile?.kind === TestRunProfileKind.Coverage && this.cfg.ablunitConfig.profiler.enabled && this.cfg.ablunitConfig.profiler.coverage) {
this.setStatus(RunStatus.Parsing, 'profiler data')
- log.debug('parsing profiler data from ' + workspace.asRelativePath(this.cfg.ablunitConfig.profFilenameUri.fsPath), options)
- await this.parseProfile().then(() => {
+ log.info('parsing profiler data...', options)
+ await this.parseProfile(options).then(() => {
log.info('parsing profiler data complete ' + parseTime.toString())
return true
}, (e: unknown) => {
this.setStatus(RunStatus.Error, 'profiler data')
log.error('Error parsing profiler data from ' + this.cfg.ablunitConfig.profFilenameUri.fsPath + '. e=' + e, options)
+ if (e instanceof Error) {
+ log.error('e.stack=' + e.stack)
+ }
throw new Error('Error parsing profiler data from ' + workspace.asRelativePath(this.cfg.ablunitConfig.profFilenameUri) + '\r\ne=' + e)
})
}
@@ -255,6 +261,7 @@ export class ABLResults implements Disposable {
async assignTestResults (item: TestItem, options: TestRun) {
if (this.skippedTests.includes(item)) {
+ log.warn('skipped test item \'' + item.label + '\'')
options.skipped(item)
return
}
@@ -291,22 +298,36 @@ export class ABLResults implements Disposable {
}
if (item.children.size > 0) {
this.parseChildSuites(item, s.testsuite, options)
+ }
+ if (s.errors > 0) {
+ log.error('errors = ' + s.errors + ', failures = ' + s.failures + ', passed = ' + s.passed + ' (item=' + item.label + ')')
+ options.errored(item, new TestMessage('errors = ' + s.errors + ', failures = ' + s.failures + ', passed = ' + s.passed))
+ } else if (s.failures) {
+ log.error('failures = ' + s.failures + ', passed = ' + s.passed + ' (item=' + item.label + ')')
+ options.failed(item, new TestMessage('failures = ' + s.failures + ', passed = ' + s.passed))
+ } else if (s.skipped) {
+ log.warn('skipped = ' + s.skipped + ', passed = ' + s.passed + ' (item=' + item.label + ')')
+ options.skipped(item)
} else {
- if (s.errors > 0) {
- log.error('errors = ' + s.errors + ', failures = ' + s.failures + ', passed = ' + s.passed + ' (item=' + item.label + ')')
- options.errored(item, new TestMessage('errors = ' + s.errors + ', failures = ' + s.failures + ', passed = ' + s.passed))
- } else if (s.failures) {
- log.error('failures = ' + s.failures + ', passed = ' + s.passed + ' (item=' + item.label + ')')
- options.failed(item, new TestMessage('failures = ' + s.failures + ', passed = ' + s.passed))
- } else if (s.skipped) {
- log.warn('skipped = ' + s.skipped + ', passed = ' + s.passed + ' (item=' + item.label + ')')
- options.skipped(item)
- } else {
- options.passed(item)
- }
+ log.info('passed = ' + s.passed + ' (item=' + item.label + ')')
+ options.passed(item)
}
} else {
this.parseFinalSuite(item, s, options)
+
+ if (s.errors > 0) {
+ log.error('s.errors=' + s.errors)
+ options.failed(item, new TestMessage(s.errors + ' errors'), this.duration.elapsed())
+ } else if (s.failures > 0) {
+ log.error('s.failures=' + s.failures)
+ options.failed(item, new TestMessage(s.failures + ' failures'), this.duration.elapsed())
+ } else if (s.skipped > 0) {
+ log.warn('skipped test case \'' + item.label + '\'')
+ options.skipped(item)
+ } else if (s.passed > 0 && s.errors == 0 && s.failures == 0) {
+ log.info('passed test case \'' + item.label + '\'')
+ options.passed(item)
+ }
}
}
@@ -445,82 +466,204 @@ export class ABLResults implements Disposable {
return tm
}
- parseProfile () {
- const startTime = new Date()
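+	// map the 'parentName testName' pair embedded in the profiler description
+	// (set by the custom VSCode ABLRunner) back to a TestItem in this run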
+ findTest (profileDescription: string | undefined) {
+ if (!profileDescription || profileDescription.split('|').length < 2) {
+ return undefined
+ }
+ let parentName = profileDescription.split('|')[1].split(' ')[0]
+ if (parentName.endsWith('.cls')) {
+ parentName = parentName.substring(0, parentName.length - 4)
+ }
+ const testName = profileDescription.split('|')[1].split(' ')[1]
+
+ const tests = []
+ for (const t of this.tests) {
+ tests.push(t)
+ for (const [, child] of t.children) {
+ tests.push(child)
+ }
+ }
+
+ const item = tests.find((t) => {
+ return t.parent?.label == parentName && t.label == testName
+ })
+
+ if (!item) {
+ log.warn('could not find test item for parent=' + parentName + ', testName=' + testName)
+ return undefined
+ }
+ return item
+ }
+
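+	// parse the primary profiler output plus any per-test data files written by
+	// the custom ABLRunner (<basename>_<entityId>.<ext>), assigning coverage to
+	// the matching test item where one can be found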
+ async parseProfile (options: TestRun) {
+ const duration = new Duration()
const profParser = new ABLProfile()
- return profParser.parseData(this.cfg.ablunitConfig.profFilenameUri, this.cfg.ablunitConfig.profiler.writeJson, this.debugLines)
- .then(() => {
- this.profileJson = profParser.profJSON
- return this.assignProfileResults()
- })
- .then(() => {
- log.debug('assignProfileResults complete (time=' + (Number(new Date()) - Number(startTime)) + ')')
- return
- }, (e: unknown) => {
- throw new Error('assignProfileResults error: ' + e)
+ const profDir = dirname(this.cfg.ablunitConfig.profFilenameUri.fsPath)
+ const profFile = basename(this.cfg.ablunitConfig.profFilenameUri.fsPath)
+		// e.g. results.prof -> results_*.prof (per-test profiler files written by the custom ABLRunner)
+ const globPattern = profFile.replace(/(.+)\.([a-zA-Z]+)$/, '$1_*.$2')
+ const dataFiles = [ basename(this.cfg.ablunitConfig.profFilenameUri.fsPath) ]
+ try {
+ dataFiles.push(...globSync(globPattern, { cwd: profDir }))
+ } catch(e) {
+ log.warn('globSync failed for ' + globPattern + '\n\te=' + e)
+ }
+ dataFiles.sort((a, b) => { return a.localeCompare(b) })
+
+ for (let i=0; i < dataFiles.length; i++) {
+ const uri = Uri.joinPath(Uri.file(profDir), dataFiles[i])
+			log.info('parsing profile data ' + (i + 1) + '/' + dataFiles.length + ' from ' + uri.fsPath, options)
+
+ const prom = profParser.parseData(uri, this.cfg.ablunitConfig.profiler.writeJson, this.debugLines).then((profJson) => {
+ const item = this.findTest(profJson.description)
+ profJson.testItemId = item?.id
+ this.profileJson.push(profJson)
+ return this.assignProfileResults(profJson, item)
+ }).then(() => {
+				log.info('parsing profile data complete (' + (i + 1) + '/' + dataFiles.length + ') ' + duration.toString())
+ return true
})
+ await prom
+ }
+ log.info('parsing profile data complete ' + duration.toString(), options)
}
- async assignProfileResults () {
- if (!this.profileJson) {
+ async assignProfileResults (profJson: ABLProfileJson, item: TestItem | undefined) {
+ if (!profJson) {
+ log.error('no profile data available...')
throw new Error('no profile data available...')
}
- const mods: IModule[] = this.profileJson.modules
- for (let idx=1; idx < mods.length; idx++) {
- const module = mods[idx]
- if (!module.SourceName) {
+ for (const module of profJson.modules) {
+ if (checkSkipList(module.SourceName)) {
continue
}
- // await this.setCoverage(module).then()
- await this.setCoverage(module)
+ await this.setCoverage(module, item)
}
}
- async setCoverage (module: IModule) {
- const fileinfo = await this.propath.search(module.SourceName)
+ getExecCount (module: IModule) {
+ const zeroLine = module.lines.find((a) => a.LineNo == 0)
+ return zeroLine?.ExecCount ?? 0
+ }
+
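+	// record DeclarationCoverage for a child module (internal procedure or method),
+	// merging execution counts when a declaration for this file already exists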
+ addDeclarationFromModule (uri: Uri, module: IModule) {
+ const fdc = this.declarationCoverage.get(uri.fsPath) ?? []
- const moduleUri = fileinfo?.uri
- if (!moduleUri) {
- if (!module.SourceName.startsWith('OpenEdge.') &&
- module.SourceName !== 'ABLUnitCore.p' &&
- module.SourceName !== 'Ccs.Common.Application') {
- log.error('could not find moduleUri for ' + module.SourceName)
+		let dc = fdc.find((c) => c.name == (module.EntityName ?? ''))
+		if (!dc) {
+			const range = getModuleRange(module)
+			if (range) {
+				dc = new DeclarationCoverage(module.EntityName ?? '', 0, range)
+				fdc.push(dc)
}
+ }
+ if (dc?.name == '') {
+ const executedLines = module.lines.filter((a) => a.ExecCount > 0)
+ if (executedLines.length > 0) {
+ dc.executed = true
+ }
+ } else if (typeof dc?.executed == 'number') {
+ dc.executed = dc.executed + this.getExecCount(module)
+ } else if (typeof dc?.executed == 'boolean') {
+ dc.executed = dc.executed || this.getExecCount(module) > 0
+ }
+
+ this.declarationCoverage.set(uri.fsPath, fdc)
+ }
+
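+	// order coverage entries by start position, then by end position when both are ranges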
+ sortLocation (a: DeclarationCoverage | StatementCoverage, b: DeclarationCoverage | StatementCoverage) {
+ let startPosA: Position
+ let startPosB: Position
+ let endPosA: Position | undefined
+ let endPosB: Position | undefined
+
+ if (a.location instanceof Position) {
+ startPosA = a.location
+ } else {
+ startPosA = a.location.start
+ }
+ if (b.location instanceof Position) {
+ startPosB = b.location
+ } else {
+ startPosB = b.location.start
+ }
+
+ const compStart = startPosA.compareTo(startPosB)
+ if (compStart != 0) {
+ return compStart
+ }
+
+ if (a.location instanceof Range) {
+ endPosA = a.location.end
+ }
+ if (b.location instanceof Range) {
+ endPosB = b.location.end
+ }
+ return endPosA?.compareTo(endPosB ?? startPosB) ?? 0
+ }
+
+ async setCoverage (module: IModule, item?: TestItem) {
+ if (checkSkipList(module.SourceName)) {
return
}
- for (const line of module.lines) {
- if (!fileinfo) {
- log.warn('file not found in propath: ' + line.srcUri)
- continue
- }
- if (line.LineNo <= 0) {
- // * -2 is a special case - need to handle this better
- // * 0 is a special case - method header
- continue
- }
+ const fileinfo = await this.propath.search(module.SourceUri ?? module.SourceName)
+ if (!fileinfo?.uri) {
+ log.warn('could not find module in propath: ' + module.SourceName + ' (' + module.ModuleID + ')')
+ return
+ }
- const dbg = await this.debugLines.getSourceLine(fileinfo?.propathRelativeFile, line.LineNo)
- if (!dbg) {
- continue
- }
- let fc = this.coverage.get(dbg.sourceUri.fsPath)
- if (!fc) {
- // create a new FileCoverage object if one didn't already exist
- fc = []
- this.coverage.set(dbg.sourceUri.fsPath, fc)
+ const zeroLine = module.lines.find((a) => a.LineNo == 0)
+ if (!zeroLine) {
+ log.warn('could not find zeroLine for ' + module.SourceName)
+ }
+
+ for (const child of module.childModules) {
+ this.addDeclarationFromModule(fileinfo.uri, child)
+ }
+ // ----- this next line would add the main block to the declaration coverage -----
+ // this.addDeclarationFromModule(fileinfo.uri, module)
+
+ const fsc = this.statementCoverage.get(fileinfo.uri.fsPath) ?? []
+ if (fsc.length === 0) {
+ this.statementCoverage.set(fileinfo.uri.fsPath, fsc)
+ }
+
+ const lines = module.lines
+ for (const child of module.childModules) {
+ lines.push(...child.lines.filter((l) => l.LineNo > 0))
+ }
+
+ for (const line of lines) {
+ if (line.LineNo <= 0) { continue }
+ const lineno = (line.incLine ?? line.LineNo) - 1
+ const coverageRange = new Position(lineno, 0)
+
+ let cov = fsc.find((c) => JSON.stringify(c.location) == JSON.stringify(coverageRange))
+ if (!cov) {
+ cov = new StatementCoverage(line.ExecCount ?? 0, coverageRange)
+ fsc.push(cov)
+ } else if (typeof cov.executed == 'number') {
+ cov.executed = cov.executed + (line.ExecCount ?? 0)
+ } else if (typeof cov.executed == 'boolean') {
+ cov.executed = cov.executed || line.ExecCount > 0
}
+ }
+
+ const fdc = this.declarationCoverage.get(fileinfo.uri.fsPath) ?? []
+ fdc.sort((a, b) => this.sortLocation(a, b))
+ fsc.sort((a, b) => this.sortLocation(a, b))
- // // TODO: end of range should be the end of the line, not the beginning of the next line
- const coverageRange = new Position(dbg.sourceLine - 1, 0)
- fc.push(new StatementCoverage(line.ExecCount ?? 0, coverageRange))
+ const fcd: FileCoverageDetail[] = []
+ fcd.push(...fdc, ...fsc)
+
+ const fc = FileCoverage.fromDetails(fileinfo.uri, fcd)
+ const fcOrig = this.fileCoverage.get(fileinfo.uri.fsPath)
+ fc.includesTests = fcOrig?.includesTests ?? []
+ if (item && !fc.includesTests.find((i) => i.id == item.id)) {
+ fc.includesTests.push(item)
}
- this.coverage.forEach((v, k) => {
- log.debug('coverage[' + k + '].length=' + v.length)
- const fileCov = FileCoverage.fromDetails(Uri.file(k), v)
- log.debug('Statement coverage for ' + k + ': ' + JSON.stringify(fileCov.statementCoverage))
- this.filecoverage.push(fileCov)
- })
+ this.fileCoverage.set(fileinfo.uri.fsPath, fc)
}
}
diff --git a/src/ABLUnitCommon.ts b/src/ABLUnitCommon.ts
index f0f7bc05..1d257589 100644
--- a/src/ABLUnitCommon.ts
+++ b/src/ABLUnitCommon.ts
@@ -6,6 +6,7 @@ export interface IExtensionTestReferences {
testController: TestController
recentResults: ABLResults[]
currentRunData: ABLResults[]
+ recentError: Error | undefined
}
export class Duration {
diff --git a/src/ABLUnitConfigWriter.ts b/src/ABLUnitConfigWriter.ts
index b84c0d92..0838a94c 100644
--- a/src/ABLUnitConfigWriter.ts
+++ b/src/ABLUnitConfigWriter.ts
@@ -1,4 +1,4 @@
-import { TestRunRequest, Uri, workspace, WorkspaceFolder } from 'vscode'
+import { TestRunProfileKind, TestRunRequest, Uri, workspace, WorkspaceFolder } from 'vscode'
import { log } from './ChannelLogger'
import { PropathParser } from './ABLPropath'
import { platform } from 'os'
@@ -6,6 +6,7 @@ import { getProfileConfig, RunConfig } from './parse/TestProfileParser'
import { CoreOptions, IABLUnitJson, ITestObj } from './parse/config/CoreOptions'
import { ProfilerOptions } from './parse/config/ProfilerOptions'
import { getOpenEdgeProfileConfig, IBuildPathEntry, IDatabaseConnection, IDlc, ProfileConfig } from './parse/OpenedgeProjectParser'
+import { dirname } from 'path'
import * as FileUtils from './FileUtils'
export const ablunitConfig = new WeakMap()
@@ -14,16 +15,18 @@ export class ABLUnitConfig {
// ablunitConfig: IABLUnitConfig = {}
ablunitConfig: RunConfig = {} as RunConfig
+ requestKind: TestRunProfileKind | undefined
setup (workspaceFolder: WorkspaceFolder, request: TestRunRequest) {
log.info('[ABLUnitConfigWriter setup] workspaceUri="' + workspaceFolder.uri.fsPath + '"')
this.ablunitConfig = getProfileConfig(workspaceFolder)
log.info('[ABLUnitConfigWriter constructor] setup complete! tempDir=' + this.ablunitConfig.tempDirUri.fsPath)
+ this.requestKind = request.profile?.kind
}
- async writeFile (uri: Uri, data: Uint8Array) {
- FileUtils.createDir(uri.with({ path: uri.path.split('/').slice(0, -1).join('/') }))
- return workspace.fs.writeFile(uri, data)
+ writeFile (uri: Uri, data: Uint8Array) {
+ FileUtils.createDir(dirname(uri.fsPath))
+ FileUtils.writeFile(uri, data)
}
createProgressIni (propath: string, dlc: IDlc) {
@@ -55,6 +58,7 @@ export class ABLUnitConfig {
FileUtils.deleteFile(this.ablunitConfig.optionsUri.jsonUri)
const out: IABLUnitJson = {
+ $comment: 'Generated by ablunit-test-runner vscode extension',
options: cfg,
tests: testQueue
}
@@ -64,9 +68,16 @@ export class ABLUnitConfig {
}
createProfileOptions (uri: Uri, profOpts: ProfilerOptions) {
- if (!profOpts.enabled) { return Promise.resolve() }
+ if (!profOpts.enabled) {
+ return
+ }
log.info('creating profiler options file: \'' + uri.fsPath + '\'')
+ FileUtils.deleteFile(this.ablunitConfig.profFilenameUri)
+ if (this.requestKind != TestRunProfileKind.Coverage) {
+ return
+ }
+
const opt: string[] = [
'## Generated by ablunit-test-runner vscode extension',
'-profiling',
@@ -91,8 +102,7 @@ export class ABLUnitConfig {
if (profOpts.traceFilter != '') {
opt.push('-traceFilter "' + profOpts.traceFilter + '"')
}
- FileUtils.deleteFile(this.ablunitConfig.profFilenameUri)
- return this.writeFile(uri, Uint8Array.from(Buffer.from(opt.join('\n') + '\n')))
+ this.writeFile(uri, Uint8Array.from(Buffer.from(opt.join('\n') + '\n')))
}
createDbConnPf (uri: Uri, dbConns: IDatabaseConnection[]) {
@@ -107,7 +117,8 @@ export class ABLUnitConfig {
lines.push(conn.connect)
}
if (lines.length > 0) {
- return this.writeFile(uri, Uint8Array.from(Buffer.from(lines.join('\n') + '\n')))
+ this.writeFile(uri, Uint8Array.from(Buffer.from(lines.join('\n') + '\n')))
+ return
}
throw new Error('unexpected error writing dbconns to ' + uri.fsPath)
}
diff --git a/src/ABLUnitRun.ts b/src/ABLUnitRun.ts
index 0e9c5cbc..85c2bb5b 100644
--- a/src/ABLUnitRun.ts
+++ b/src/ABLUnitRun.ts
@@ -1,4 +1,4 @@
-import { CancellationError, CancellationToken, TestRun, Uri, workspace } from 'vscode'
+import { CancellationError, CancellationToken, TestRun, TestRunProfileKind, Uri, workspace } from 'vscode'
import { ABLResults } from './ABLResults'
import { Duration } from './ABLUnitCommon'
import { SendHandle, Serializable, SpawnOptions, spawn } from 'child_process'
@@ -8,6 +8,7 @@ import { basename, dirname } from 'path'
import { globSync } from 'glob'
import * as fs from 'fs'
import * as FileUtils from './FileUtils'
+import { ABLUnitRuntimeError, TimeoutError } from 'Errors'
export enum RunStatus {
None = 10,
@@ -39,33 +40,6 @@ export enum RunStatusString {
'Error' = 82,
}
-export class ABLUnitRuntimeError extends Error {
- constructor (message: string, public promsgError: string, public cmd?: string) {
- super(message)
- this.name = 'ABLUnitRuntimeError'
- }
-}
-
-export interface ITimeoutError extends Error {
- duration: Duration
- limit: number
- cmd?: string
-}
-
-export class TimeoutError extends Error implements ITimeoutError {
- duration: Duration
- limit: number
- cmd?: string
-
- constructor (message: string, duration: Duration, limit: number, cmd: string) {
- super(message)
- this.name = 'TimeoutError'
- this.duration = duration
- this.limit = limit
- this.cmd = cmd
- }
-}
-
export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation: CancellationToken) => {
const abort = new AbortController()
const { signal } = abort
@@ -143,7 +117,7 @@ export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation
cmd.push('-pf', res.cfg.ablunitConfig.dbConnPfUri.fsPath)
}
- if (res.cfg.ablunitConfig.profiler.enabled) {
+ if (res.cfg.ablunitConfig.profiler.enabled && res.cfg.requestKind == TestRunProfileKind.Coverage) {
cmd.push('-profile', res.cfg.ablunitConfig.profOptsUri.fsPath)
}
@@ -167,7 +141,7 @@ export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation
return cmdSanitized
}
- const parseRuntimeError = (stdout: string): string | false => {
+ const _parseRuntimeError = (stdout: string): string | false => {
// extract the last line that looks like a promsg format, assume it's an error to attach to a failing test case
const promsgRegex = /^.* \(\d+\)/
const lines = stdout.split('\n').reverse()
@@ -181,7 +155,7 @@ export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation
}
const runCommand = () => {
- FileUtils.deleteFile(
+ FileUtils.deleteFile([
res.cfg.ablunitConfig.profFilenameUri,
// res.cfg.ablunitConfig.config_uri,
res.cfg.ablunitConfig.optionsUri.filenameUri,
@@ -189,7 +163,7 @@ export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation
res.cfg.ablunitConfig.optionsUri.updateUri,
res.cfg.ablunitConfig.profFilenameUri,
// res.cfg.ablunitConfig.profOptsUri,
- )
+ ])
if (res.cfg.ablunitConfig.optionsUri.updateUri) {
fs.writeFileSync(res.cfg.ablunitConfig.optionsUri.updateUri.fsPath, '')
@@ -218,7 +192,7 @@ export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation
args.shift()
if (res.cfg.ablunitConfig.optionsUri.updateUri) {
- watcher = fs.watchFile(res.cfg.ablunitConfig.optionsUri.updateUri.fsPath, (curr, prev) => {
+ watcher = fs.watchFile(res.cfg.ablunitConfig.optionsUri.updateUri.fsPath, (_curr, _prev) => {
processUpdates(options, res.tests, res.cfg.ablunitConfig.optionsUri.updateUri)
})
}
@@ -287,7 +261,7 @@ export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation
if (code && code != 0) {
res.setStatus(RunStatus.Error, 'exit_code=' + code)
log.info('----- ABLUnit Test Run Failed (exit_code=' + code + ') ----- ' + testRunDuration, options)
- reject(new ABLUnitRuntimeError('ABLUnit exit_code= ' + code, 'ABLUnit exit_code= ' + code + '; signal=' + signal, cmd))
+ reject(new ABLUnitRuntimeError('ABLUnit exit_code=' + code, 'ABLUnit exit_code=' + code + '; signal=' + signal, cmd))
return
}
@@ -296,7 +270,7 @@ export const ablunitRun = async (options: TestRun, res: ABLResults, cancellation
resolve('success')
}).on('close', (code: number | null, signal: NodeJS.Signals | null) => {
log.info('process.close code=' + code + '; signal=' + signal + '; process.exitCode=' + process.exitCode + '; process.signalCode=' + process.signalCode + '; killed=' + process.killed)
- }).on('message', (m: Serializable, h: SendHandle) => {
+ }).on('message', (m: Serializable, _h: SendHandle) => {
log.info('process.on.message m=' + JSON.stringify(m))
})
})
diff --git a/src/ChannelLogger.ts b/src/ChannelLogger.ts
index 122b78d9..7f2ce93c 100644
--- a/src/ChannelLogger.ts
+++ b/src/ChannelLogger.ts
@@ -82,15 +82,14 @@ class Logger {
this.writeMessage(LogLevel.Error, message, testRun)
}
- notification (message: string, notificationType: NotificationType = NotificationType.Info) {
- const logMessage = 'NOTIFICATION: ' + message + ' (type=' + notificationType + ', enabled=' + this.notificationsEnabled + ')'
+ private notification (message: string, notificationType: NotificationType = NotificationType.Info) {
+ const logMessage = 'NOTIFICATION: ' + message + ' (enabled=' + this.notificationsEnabled + ')'
switch (notificationType) {
case NotificationType.Info:
log.info(logMessage)
if (this.notificationsEnabled) {
void window.showInformationMessage(message)
}
- void window.showInformationMessage(message)
break
case NotificationType.Warn:
log.warn(logMessage)
@@ -103,21 +102,16 @@ class Logger {
}
}
- notificationWarningSync (message: string) {
- log.warn(message)
- return window.showWarningMessage(message)
+ notificationInfo (message: string) {
+ this.notification(message, NotificationType.Info)
}
notificationWarning (message: string) {
- log.warn(message)
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const p = window.showWarningMessage(message).then(() => { return }, () => { return })
- return
+ this.notification(message, NotificationType.Warn)
}
notificationError (message: string) {
- log.error(message)
- return window.showErrorMessage(message)
+ this.notification(message, NotificationType.Error)
}
private writeMessage (messageLevel: LogLevel, message: string, testRun?: TestRun, includeStack = false) {
diff --git a/src/Errors.ts b/src/Errors.ts
new file mode 100644
index 00000000..6d370adf
--- /dev/null
+++ b/src/Errors.ts
@@ -0,0 +1,35 @@
+import { Duration } from 'ABLUnitCommon'
+
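+// Shared error types for the extension; ABLUnitRuntimeError and TimeoutError
+// were previously defined in ABLUnitRun.ts.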
+export class NotImplementedError extends Error {
+ constructor (message: string) {
+ super(message)
+ this.name = 'NotImplementedError'
+ }
+}
+
+export class ABLUnitRuntimeError extends Error {
+ constructor (message: string, public promsgError: string, public cmd?: string) {
+ super(message)
+ this.name = 'ABLUnitRuntimeError'
+ }
+}
+
+export interface ITimeoutError extends Error {
+ duration: Duration
+ limit: number
+ cmd?: string
+}
+
+export class TimeoutError extends Error implements ITimeoutError {
+ duration: Duration
+ limit: number
+ cmd?: string
+
+ constructor (message: string, duration: Duration, limit: number, cmd: string) {
+ super(message)
+ this.name = 'TimeoutError'
+ this.duration = duration
+ this.limit = limit
+ this.cmd = cmd
+ }
+}
diff --git a/src/FileUtils.ts b/src/FileUtils.ts
index 1a2e8353..548f2eac 100644
--- a/src/FileUtils.ts
+++ b/src/FileUtils.ts
@@ -1,39 +1,39 @@
import * as fs from 'fs'
+import * as fsp from 'fs/promises'
import JSON_minify from 'node-json-minify'
-import { Uri, workspace } from 'vscode'
+import { FileSystemError, Uri, workspace } from 'vscode'
import { log } from 'ChannelLogger'
-
-class FileNotFoundError extends Error {
- public uri: Uri | undefined = undefined
-
- constructor (public readonly path: string | Uri) {
- super('file not found: ' + path)
- this.name = 'ABLUnitRuntimeError'
- if (path instanceof Uri) {
- this.uri = path
- this.path = path.fsPath
- } else {
- this.path = path
- if (isRelativePath(path)) {
- this.uri = Uri.joinPath(workspace.workspaceFolders![0].uri, path)
- } else {
- this.uri = Uri.file(path)
- }
+import { RmOptions } from 'fs'
+
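+// synchronous file read that maps Node.js fs error codes to the corresponding vscode FileSystemError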
+export function readFileSync (path: string | Uri, opts?: { encoding?: null; flag?: string; } | null): Buffer {
+ try {
+ return fs.readFileSync(path instanceof Uri ? path.fsPath : path, opts)
+ } catch (e: unknown) {
+ // @ts-expect-error this is safe
+ switch (e.code) {
+ case 'ENOENT':
+ throw FileSystemError.FileNotFound(path)
+ case 'EACCES':
+ throw FileSystemError.NoPermissions('permission denied: ' + path)
+ case 'EISDIR':
+ throw FileSystemError.FileIsADirectory(path)
+ default:
+ if (e instanceof Error) {
+ const err = e as FileSystemError
+ throw err
+ }
+ throw new FileSystemError('Uncategorized error! e=' + e)
}
}
}
-export function readFileSync (path: string | Uri, opts?: { encoding?: null; flag?: string; } | null) {
- return fs.readFileSync(path instanceof Uri ? path.fsPath : path, opts)
-}
-
-export function readLinesFromFileSync (uri: Uri) {
+export function readLinesFromFileSync (uri: Uri): string[] {
const content = readFileSync(uri).toString()
const lines = content.replace(/\r/g, '').split('\n')
return lines
}
-export function readStrippedJsonFile (uriOrPath: Uri | string) {
+export function readStrippedJsonFile (uriOrPath: Uri | string): object {
let path: string
if (uriOrPath instanceof Uri) {
path = uriOrPath.fsPath
@@ -46,25 +46,28 @@ export function readStrippedJsonFile (uriOrPath: Uri | string) {
return ret
}
-export function writeFile (path: string | Uri, data: string | Uint8Array, options?: fs.WriteFileOptions) {
+export function writeFile (path: string | Uri, data: string | Uint8Array, options?: fs.WriteFileOptions): void {
if (path instanceof Uri) {
path = path.fsPath
}
fs.writeFileSync(path, data, options)
}
-export function validateFile (path: string | Uri) {
+export function validateFile (path: string | Uri): boolean {
if (path instanceof Uri) {
if (!doesFileExist(path)) {
- throw new FileNotFoundError(path)
+ throw FileSystemError.FileNotFound(path)
}
return true
}
return true
}
-export function toUri (path: string, base?: string) {
+export function toUri (path: string | Uri, base?: string): Uri {
+ if (path instanceof Uri) {
+ return path
+ }
if (base && isRelativePath(path)) {
let uri = Uri.file(base)
uri = Uri.joinPath(uri, path)
@@ -77,22 +80,22 @@ export function toUri (path: string, base?: string) {
}
return Uri.joinPath(workspace.workspaceFolders[0].uri, path)
}
- throw new Error('path is relative but no base provided: ' + path)
+ throw new FileSystemError('No basedir provided for relative path: ' + path)
}
-
return Uri.file(path)
}
-export function isRelativePath (path: string) {
+export function isRelativePath (path: string): boolean {
if(path.startsWith('/') || RegExp(/^[a-zA-Z]:[\\/]/).exec(path)) {
return false
- } else {
- return true
}
+ return true
}
-
-function doesPathExist (uri: Uri, type?: 'file' | 'directory') {
+function doesPathExist (uri: Uri | string, type?: 'file' | 'directory'): boolean {
+ if (!(uri instanceof Uri)) {
+ uri = Uri.file(uri)
+ }
const exist = fs.existsSync(uri.fsPath)
if (!exist || !type) {
return false
@@ -106,54 +109,100 @@ function doesPathExist (uri: Uri, type?: 'file' | 'directory') {
return false
}
-
-export function doesFileExist (uri: Uri) {
+export function doesFileExist (uri: Uri | string): boolean {
return doesPathExist(uri, 'file')
}
-
-export function doesDirExist (uri: Uri) {
+export function doesDirExist (uri: Uri | string): boolean {
return doesPathExist(uri, 'directory')
}
-export function createDir (uri: Uri) {
+export function createDir (uri: Uri | string): void {
+ uri = toUri(uri)
if (!doesPathExist(uri, 'directory')) {
if (doesPathExist(uri)) {
- throw new Error('path exists but is not a directory: ' + uri.fsPath)
+ throw FileSystemError.FileNotADirectory(uri)
}
fs.mkdirSync(uri.fsPath, { recursive: true })
}
}
-function deletePath (type: 'directory' | 'file', uris: (Uri | undefined)[]) {
- if (uris.length == 0) {
+function deletePath (type: 'directory' | 'file', uris: Uri[], options: RmOptions = { force: true, recursive: true }): void {
+ if (!uris || uris.length == 0) {
return
}
+
+ if (options.recursive === undefined) {
+ options.recursive = true
+ }
+
for (const uri of uris) {
if (!uri) {
continue
}
- if (doesPathExist(uri, type)) {
- fs.rmSync(uri.fsPath, { recursive: true })
- continue
- }
- if (doesPathExist(uri)) {
- throw new Error('path exists but is not a ' + type + ': ' + uri.fsPath)
+ try {
+ fs.rmSync(uri.fsPath, options)
+ } catch (e: unknown) {
+ if (e instanceof Error) {
+ const err = e as FileSystemError
+ if (err.code != 'ENOENT') {
+ throw err
+ }
+ log.debug('deletePath: ' + type + ' does not exist: ' + uri.fsPath)
+ } else {
+ throw e
+ }
}
}
}
-export function deleteFile (...files: (Uri | undefined)[]) {
- deletePath('file', files)
+export function deleteFile (file: Uri | undefined | (Uri | undefined)[], options?: RmOptions): void {
+ if (!file) return
+ let files: Uri[] = []
+ if (file instanceof Uri) {
+ files = [file]
+ } else if (file) {
+ files = file.filter((f) => f != undefined)
+ }
+ deletePath('file', files, options)
}
-export function deleteDir (...dirs: (Uri | undefined)[]) {
- deletePath('directory', dirs)
+export function deleteDir (dir: Uri | undefined | (Uri | undefined)[], options?: RmOptions): void {
+ if (!dir) return
+ let dirs: Uri[] = []
+ if (dir instanceof Uri) {
+ dirs = [dir]
+ } else if (dir) {
+ dirs = dir.filter((d) => d != undefined)
+ }
+ deletePath('directory', dirs, options)
}
-export function copyFile (source: Uri, target: Uri, opts?: fs.CopySyncOptions) {
+export function copyFile (source: Uri | string, target: Uri | string, _opts?: fs.CopySyncOptions): void {
+ source = toUri(source)
+ target = toUri(target)
if (!doesFileExist(source)) {
- log.warn('source file does not exist: ' + source.fsPath)
+ log.warn('copyFile failed! source file does not exist: ' + source.fsPath)
+ }
+ fs.copyFileSync(source.fsPath, target.fsPath)
+}
+
+export function copyFileAsync (source: Uri | string, target: Uri | string): Promise<void> {
+ source = toUri(source)
+ target = toUri(target)
+ if (!doesFileExist(source)) {
+ log.warn('copyFile failed! source file does not exist: ' + source.fsPath)
+ return Promise.resolve()
+ }
+ return fsp.copyFile(source.fsPath, target.fsPath)
+}
+
+export function renameFile (source: Uri | string, target: Uri | string): void {
+ source = toUri(source)
+ target = toUri(target)
+ if (!doesFileExist(source)) {
+ log.warn('renameFile failed! source file does not exist: ' + source.fsPath)
+ return
}
- fs.cpSync(source.fsPath, target.fsPath, opts)
+ fs.renameSync(source.fsPath, target.fsPath)
}
diff --git a/src/extension.ts b/src/extension.ts
index 166f7253..6ba9bd99 100644
--- a/src/extension.ts
+++ b/src/extension.ts
@@ -1,12 +1,12 @@
import {
- CancellationError,
- CancellationToken, ConfigurationChangeEvent, ExtensionContext,
+ CancellationError, CancellationToken, CancellationTokenSource, ConfigurationChangeEvent, ExtensionContext,
ExtensionMode,
FileCoverage,
FileCoverageDetail,
FileCreateEvent,
LogLevel,
Position, Range, RelativePattern, Selection,
+ StatementCoverage,
TestController, TestItem, TestItemCollection, TestMessage,
TestRun,
TestRunProfileKind, TestRunRequest,
@@ -21,15 +21,16 @@ import { log } from './ChannelLogger'
import { getContentFromFilesystem } from './parse/TestParserCommon'
import { ABLTestCase, ABLTestClass, ABLTestData, ABLTestDir, ABLTestFile, ABLTestProgram, ABLTestSuite, resultData, testData } from './testTree'
import { minimatch } from 'minimatch'
-import { ABLUnitRuntimeError, TimeoutError } from 'ABLUnitRun'
+import { ABLUnitRuntimeError, TimeoutError } from 'Errors'
import { basename } from 'path'
import * as FileUtils from './FileUtils'
import { gatherAllTestItems, IExtensionTestReferences } from 'ABLUnitCommon'
+import { getDeclarationCoverage } from 'parse/ProfileParser'
let recentResults: ABLResults[] = []
let recentError: Error | undefined = undefined
-export async function activate (context: ExtensionContext) {
+export function activate (context: ExtensionContext) {
const ctrl = tests.createTestController('ablunitTestController', 'ABLUnit Test')
let currentTestRun: TestRun | undefined = undefined
let isRefreshTestsComplete = false
@@ -41,7 +42,7 @@ export async function activate (context: ExtensionContext) {
const contextStorageUri = context.storageUri ?? Uri.file(process.env['TEMP'] ?? '') // will always be defined as context.storageUri
const contextResourcesUri = Uri.joinPath(context.extensionUri, 'resources')
- setContextPaths(contextStorageUri, contextResourcesUri, context.logUri)
+ setContextPaths(contextStorageUri)
FileUtils.createDir(contextStorageUri)
context.subscriptions.push(ctrl)
@@ -56,17 +57,52 @@ export async function activate (context: ExtensionContext) {
commands.registerCommand('_ablunit.getTestController', () => { return ctrl }),
commands.registerCommand('_ablunit.getTestData', () => { return testData.getMap() }),
commands.registerCommand('_ablunit.getTestItem', (uri: Uri) => { return getExistingTestItem(ctrl, uri) }),
- commands.registerCommand('_ablunit.getTestRunError', () => { return recentError })
+ commands.registerCommand('_ablunit.getTestRunError', () => { return recentError }),
+ commands.registerCommand('_loadDetailedCoverageForTest', (uri: Uri, testId: string) => {
+ if (!currentTestRun) {
+ throw new Error('currentTestRun is undefined')
+ }
+
+ const fileCoverage = recentResults[0].fileCoverage.get(uri.fsPath)
+ if (!fileCoverage) {
+ throw new Error('fileCoverage not found for ' + uri.fsPath)
+ }
+
+ const tests = []
+ for (const test of recentResults[0].tests) {
+ tests.push(test, ...gatherTestItems(test.children))
+ }
+ const fromTest = tests.find((a) => a.id == testId)
+ if (!fromTest) {
+ throw new Error('TestItem not found for ' + testId)
+ }
+
+ return loadDetailedCoverageForTest(currentTestRun, fileCoverage, fromTest, new CancellationTokenSource().token)
+ })
)
}
context.subscriptions.push(
commands.registerCommand('_ablunit.openCallStackItem', openCallStackItem),
workspace.onDidChangeConfiguration(e => { return updateConfiguration(e) }),
- workspace.onDidOpenTextDocument(e => { log.info('workspace.onDidOpenTextDocument'); return createOrUpdateFile(ctrl, e.uri, true) }),
+ workspace.onDidOpenTextDocument(e => {
+ if (e.uri.scheme != 'file') {
+ return
+ }
+ if (workspace.getWorkspaceFolder(e.uri) === undefined) {
+ return
+ }
+ return createOrUpdateFile(ctrl, e.uri, true)
+ }),
workspace.onDidChangeTextDocument(e => { return didChangeTextDocument(e, ctrl) }),
- workspace.onDidCreateFiles(e => { log.info('workspace.onDidCreate ' + e.files[0].fsPath); return createOrUpdateFile(ctrl, e, true) }),
- workspace.onDidDeleteFiles(e => { log.info('workspace.onDidDelete ' + e.files[0].fsPath); return deleteFiles(ctrl, e.files) }),
+ workspace.onDidCreateFiles(e => {
+ log.info('workspace.onDidCreate ' + e.files[0].fsPath)
+ return createOrUpdateFile(ctrl, e, true)
+ }),
+ workspace.onDidDeleteFiles(e => {
+ log.info('workspace.onDidDelete ' + e.files[0].fsPath)
+ return deleteTestsInFiles(ctrl, e.files)
+ }),
// ...startWatchingWorkspace(ctrl),
)
@@ -81,14 +117,15 @@ export async function activate (context: ExtensionContext) {
}
const ret = {
testController: ctrl,
- recentResults: recentResults,
- currentRunData: data
+ recentResults,
+ currentRunData: data,
+ recentError
} as IExtensionTestReferences
log.debug('_ablunit.getExtensionTestReferences currentRunData.length=' + ret.currentRunData?.length + ', recentResults.length=' + ret.recentResults?.length)
return ret
}
- const runHandler = (request: TestRunRequest, token: CancellationToken): Promise => {
+ const runHandler = (request: TestRunRequest, token: CancellationToken) => {
if (request.continuous) {
throw new Error('continuous test runs not implemented')
}
@@ -102,19 +139,79 @@ export async function activate (context: ExtensionContext) {
})
}
- const loadDetailedCoverage = (testRun: TestRun, fileCoverage: FileCoverage, token: CancellationToken): Thenable => {
- log.info('loadDetailedCoverage uri="' + fileCoverage.uri.fsPath + '", testRun=' + testRun.name)
- const d = resultData.get(testRun)
- const det: FileCoverageDetail[] = []
- if (d) {
- d.flatMap((r) => {
- const rec = r.coverage.get(fileCoverage.uri.fsPath)
- if (rec) {
- det.push(...rec)
+ const loadDetailedCoverageForTest = (
+ testRun: TestRun,
+ fileCoverage: FileCoverage,
+ fromTestItem: TestItem,
+ _token: CancellationToken): Promise<FileCoverageDetail[]> => {
+
+ const ret: FileCoverageDetail[] = []
+
+ // log.info('loadDetailedCoverageForTest uri="' + fileCoverage.uri.fsPath + '", testRun=' + testRun.name)
+ const results = resultData.get(testRun)
+ if (!results) {
+ log.error('test run has no associated results')
+ throw new Error('test run has no associated results')
+ }
+
+ for (const res of results) {
+ const profJson = res.profileJson.find((prof) => prof.testItemId == fromTestItem.id)
+ if (!profJson) {
+ log.warn('no profile data found for test item ' + fromTestItem.id)
+ continue
+ }
+ const module = profJson.modules.find((mod) => mod.SourceUri?.fsPath == fileCoverage.uri.fsPath)
+ if (!module) {
+ log.warn('no module data found for ' + fileCoverage.uri.fsPath)
+ continue
+ }
+
+ const lines = module.childModules.map((a) => a.lines).flat()
+ for (const line of lines) {
+ if (line.LineNo == 0) {
+ continue
}
- })
+ const coverageLocation = new Position(line.LineNo - 1, 0)
+ const sc = ret.find((a) => JSON.stringify(a.location) == JSON.stringify(coverageLocation))
+ if (!sc) {
+ ret.push(new StatementCoverage(line.ExecCount, coverageLocation))
+ } else if (typeof sc.executed == 'boolean') {
+ sc.executed = sc.executed || line.ExecCount > 0
+ } else if (typeof sc.executed == 'number') {
+ sc.executed = sc.executed + line.ExecCount
+ } else {
+ throw new Error('unexpected type for sc.executed: ' + typeof sc.executed)
+ }
+ }
+
+ ret.push(...getDeclarationCoverage(module))
+ }
+ if (ret.length == 0) {
+ log.warn('no coverage data found for ' + fileCoverage.uri.fsPath + ' by test item ' + fromTestItem.id)
}
- return Promise.resolve(det)
+ return Promise.resolve(ret)
+ }
+
+ const loadDetailedCoverage = (testRun: TestRun, fileCoverage: FileCoverage, _token: CancellationToken) => {
+ const ret: FileCoverageDetail[] = []
+ const results = resultData.get(testRun) ?? recentResults
+ if (!results) {
+ log.error('test run has no associated results')
+ throw new Error('test run has no associated results')
+ }
+
+ for (const result of results) {
+ const lc = result.statementCoverage.get(fileCoverage.uri.fsPath)
+ if (lc) {
+ ret.push(...lc)
+ }
+ const dc = result.declarationCoverage.get(fileCoverage.uri.fsPath)
+ if (dc) {
+ ret.push(...dc)
+ }
+ }
+
+ return Promise.resolve(ret)
}
async function openTestRunConfig () {
@@ -202,9 +299,11 @@ export async function activate (context: ExtensionContext) {
log.error('---------- ablunit run cancelled ----------', run)
// log.error('[runTestQueue] ablunit run cancelled!', run)
} else if (e instanceof ABLUnitRuntimeError) {
- log.error('ablunit runtime error!\ne=' + JSON.stringify(e))
+ log.error('ablunit runtime error!\n\te=' + JSON.stringify(e))
} else if (e instanceof TimeoutError) {
log.error('ablunit run timed out!')
+ } else if (e instanceof Error) {
+ log.error('ablunit run failed! e=' + e + '\n' + e.stack)
} else {
log.error('ablunit run failed!: ' + e, run)
// log.error('ablunit run failed parsing results with exception: ' + e, run)
@@ -284,19 +383,20 @@ export async function activate (context: ExtensionContext) {
if (request.profile?.kind === TestRunProfileKind.Coverage) {
log.info('adding coverage results to test run')
- for (const res of data) {
- log.info('res.filecoverage.length=' + res.filecoverage.length)
- if (res.filecoverage.length === 0) {
- log.warn('no coverage data found (profile data path=' + res.cfg.ablunitConfig.profFilenameUri.fsPath + ')')
+ for (let i=0; i < recentResults.length; i++) {
+ const res = recentResults[i]
+ if (res.fileCoverage.size === 0) {
+ log.warn('no coverage data found (' + (i + 1) + '/' + recentResults.length + ')' +
+ '\n\t- profile data path=' + res.cfg.ablunitConfig.profFilenameUri.fsPath + ')')
}
- res.filecoverage.forEach((c) => {
+ res.fileCoverage.forEach((c) => {
run.addCoverage(c)
})
}
}
run.end()
- log.notification('ablunit tests complete')
+ log.notificationInfo('ablunit tests complete')
return
}
@@ -339,14 +439,17 @@ export async function activate (context: ExtensionContext) {
return res
}
- log.notification('running ablunit tests')
+ log.notificationInfo('running ablunit tests')
const queue: { test: TestItem; data: ABLTestData }[] = []
const run = ctrl.createTestRun(request)
+ run.onDidDispose(() => {
+ log.info('test run disposed run.name=' + run.name)
+ // TODO - delete ABLResults objects, delete artifacts possibly too
+ })
currentTestRun = run
cancellation.onCancellationRequested(() => {
log.debug('cancellation requested - createABLResults-2')
run.end()
- log.trace('run.end()')
throw new CancellationError()
})
const tests = request.include ?? gatherTestItems(ctrl.items)
@@ -369,7 +472,7 @@ export async function activate (context: ExtensionContext) {
})
}
- function updateNodeForDocument (e: TextDocument | TestItem | Uri, r: string) {
+ function updateNodeForDocument (e: TextDocument | TestItem | Uri) {
let u: Uri | undefined
if (e instanceof Uri) {
u = e
@@ -382,7 +485,6 @@ export async function activate (context: ExtensionContext) {
if (u.scheme != 'file') {
return Promise.resolve()
}
- log.info('u = ' + JSON.stringify(u))
if (workspace.getWorkspaceFolder(u) === undefined) {
log.info('skipping updateNodeForDocument for file not in workspace: ' + u.fsPath)
return Promise.resolve(false)
@@ -406,7 +508,7 @@ export async function activate (context: ExtensionContext) {
}
if (item.uri) {
- return updateNodeForDocument(item, 'resolve').then(() => {
+ return updateNodeForDocument(item).then(() => {
return
})
}
@@ -418,6 +520,20 @@ export async function activate (context: ExtensionContext) {
return Promise.resolve()
}
+ // ctrl.invalidateTestResults = (items?: TestItem | readonly TestItem[]) => {
+ // log.info('ctrl.invalidateTestResults')
+ // if (items instanceof Array) {
+ // log.info(' - items.length=' + items.length)
+ // for(let i=0; i < items.length; i++) {
+ // log.info(' - items[' + i + '].id=' + items[i].id)
+ // }
+ // } else {
+ // log.info(' - items.id=' + items?.id)
+ // }
+ // }
+
+
+
ctrl.refreshHandler = (token: CancellationToken) => {
log.info('ctrl.refreshHandler start')
isRefreshTestsComplete = false
@@ -469,23 +585,25 @@ export async function activate (context: ExtensionContext) {
const testProfileRun = ctrl.createRunProfile('Run Tests', TestRunProfileKind.Run, runHandler, true, new TestTag('runnable'), false)
// const testProfileDebug = ctrl.createRunProfile('Debug Tests', TestRunProfileKind.Debug, runHandler, false, new TestTag('runnable'), false)
const testProfileCoverage = ctrl.createRunProfile('Run Tests w/ Coverage', TestRunProfileKind.Coverage, runHandler, true, new TestTag('runnable'), false)
- // const testProfileDebugCoverage = ctrl.createRunProfile('Debug Tests w/ Coverage', TestRunProfileKind.Coverage, runHandler, false, new TestTag('runnable'), false)
testProfileRun.configureHandler = configHandler
// testProfileDebug.configureHandler = configHandlerDebug
testProfileCoverage.configureHandler = configHandler
testProfileCoverage.loadDetailedCoverage = loadDetailedCoverage
- // testProfileDebugCoverage.configureHandler = configHandler
+ testProfileCoverage.loadDetailedCoverageForTest = loadDetailedCoverageForTest
+ let prom
if(workspace.getConfiguration('ablunit').get('discoverAllTestsOnActivate', false)) {
- await commands.executeCommand('testing.refreshTests')
+ prom = commands.executeCommand('testing.refreshTests')
+ } else {
+ prom = Promise.resolve()
}
- log.info('activation complete')
- return true
+ return prom.then(() => {
+ log.info('activation complete')
+ return true
+ })
}
let contextStorageUri: Uri
-let contextResourcesUri: Uri
-let contextLogUri: Uri
function updateNode (uri: Uri, ctrl: TestController) {
log.debug('updateNode uri="' + uri.fsPath + '"')
@@ -510,28 +628,17 @@ function didChangeTextDocument (e: TextDocumentChangeEvent, ctrl: TestController
return Promise.resolve()
}
- log.info('workspace.onDidChange uri="' + e.document.uri.fsPath + '"; reason=' + e.reason)
return updateNode(e.document.uri, ctrl)
}
-export function setContextPaths (storageUri: Uri, resourcesUri: Uri, logUri: Uri) {
+export function setContextPaths (storageUri: Uri) {
contextStorageUri = storageUri
- contextResourcesUri = resourcesUri
- contextLogUri = logUri
}
export function getContextStorageUri () {
return contextStorageUri
}
-export function getContextResourcesUri () {
- return contextResourcesUri
-}
-
-export function getContextLogUri () {
- return contextLogUri
-}
-
export function checkCancellationRequested (run: TestRun) {
if (run.token.isCancellationRequested) {
log.debug('cancellation requested - chcekCancellationRequested')
@@ -541,7 +648,9 @@ export function checkCancellationRequested (run: TestRun) {
}
function getStorageUri (workspaceFolder: WorkspaceFolder) {
- if (!getContextStorageUri) { throw new Error('contextStorageUri is undefined') }
+ if (!getContextStorageUri()) {
+ throw new Error('contextStorageUri is undefined')
+ }
const dirs = workspaceFolder.uri.path.split('/')
const ret = Uri.joinPath(getContextStorageUri(), dirs[dirs.length - 1])
@@ -565,6 +674,7 @@ function getOrCreateFile (controller: TestController, uri: Uri, excludePatterns?
if (excludePatterns && excludePatterns.length > 0 && isFileExcluded(uri, excludePatterns)) {
if (existing) {
+ log.info('560')
deleteTest(controller, existing)
}
return { item: undefined, data: undefined }
@@ -732,7 +842,6 @@ function getTestFileAttrs (file: Uri) {
return 'other'
}
-// TODO - deprecate this function
function gatherTestItems (collection: TestItemCollection) {
const items: TestItem[] = []
for(const [, item] of collection) {
@@ -774,19 +883,20 @@ function getWorkspaceTestPatterns () {
return [ includePatterns, excludePatterns ]
}
-function deleteFiles (controller: TestController, files: readonly Uri[]) {
+function deleteTestsInFiles (controller: TestController, files: readonly Uri[]) {
log.info('deleted files detected: ' + files.length)
let didDelete = false
for (const uri of files) {
log.info('deleted file detected: ' + uri.fsPath)
const item = getExistingTestItem(controller, uri)
if (item) {
+ log.info('570')
didDelete = deleteTest(controller, item)
} else {
log.warn('no test file found for deleted file: ' + uri.fsPath)
}
}
- return Promise.resolve(didDelete)
+ return didDelete
}
function deleteTest (controller: TestController | undefined, item: TestItem | Uri) {
@@ -794,6 +904,7 @@ function deleteTest (controller: TestController | undefined, item: TestItem | Ur
for (const child of gatherTestItems(item.children)) {
deleteChildren(controller, child)
child.children.delete(item.id)
+ log.info('delete child test: ' + item.id + ' (children.size=' + item.children.size + ')')
testData.delete(child)
}
}
@@ -866,11 +977,13 @@ function removeExcludedChildren (parent: TestItem, excludePatterns: RelativePatt
if (data instanceof ABLTestFile) {
const excluded = isFileExcluded(item.uri!, excludePatterns)
if (item.uri && excluded) {
+ log.info('400')
deleteTest(undefined, item)
}
} else if (data?.isFile) {
removeExcludedChildren(item, excludePatterns)
if (item.children.size == 0) {
+ log.info('401')
deleteTest(undefined, item)
}
}
@@ -897,19 +1010,14 @@ function findMatchingFiles (includePatterns: RelativePattern[], token: Cancellat
function removeDeletedFiles (ctrl: TestController) {
const items = gatherAllTestItems(ctrl.items)
- const proms: PromiseLike[] = []
for (const item of items) {
- if (!item.uri) { continue }
- const p = workspace.fs.stat(item.uri)
- .then((s) => {
- log.debug('file still exists, skipping delete (item.id=' + item.id + ')')
- return
- }, (e: unknown) => {
- deleteTest(ctrl, item)
- })
- proms.push(p)
+ if (!item.uri) {
+ continue
+ }
+ if (!FileUtils.doesFileExist(item.uri)) {
+ deleteTest(ctrl, item)
+ }
}
- return Promise.all(proms).then(() => { return true })
}
function refreshTestTree (controller: TestController, token: CancellationToken): Promise {
@@ -949,8 +1057,8 @@ function refreshTestTree (controller: TestController, token: CancellationToken):
log.debug('finding files...')
- const prom1 = removeDeletedFiles(controller)
- .then(() => { return findMatchingFiles(includePatterns, token, checkCancellationToken) })
+ removeDeletedFiles(controller)
+ const prom1 = findMatchingFiles(includePatterns, token, checkCancellationToken)
.then((r) => {
for (const file of r) {
checkCancellationToken()
@@ -1005,6 +1113,7 @@ function createOrUpdateFile (controller: TestController, e: Uri | FileCreateEven
const proms: PromiseLike[] = []
for (const uri of uris) {
if (!isFileIncluded(uri, includePatterns, excludePatterns)) {
+ log.info('550')
deleteTest(controller, uri)
continue
}
diff --git a/src/parse/CallStackParser.ts b/src/parse/CallStackParser.ts
index 4ab3f230..73853fee 100644
--- a/src/parse/CallStackParser.ts
+++ b/src/parse/CallStackParser.ts
@@ -71,7 +71,7 @@ export async function parseCallstack (debugLines: ABLDebugLines, callstackRaw: s
let lineinfo: SourceMapItem | undefined = undefined
lineinfo = await debugLines.getSourceLine(moduleParent, debugLine)
.catch((e: unknown) => {
- log.info('could not find source line for ' + moduleParent + ' at line ' + debugLine + '. using raw callstack data')
+ log.warn('could not find source line for ' + moduleParent + ':' + debugLine + ' using raw callstack data (e=' + e + ')')
return undefined
})
diff --git a/src/parse/OpenedgeProjectParser.ts b/src/parse/OpenedgeProjectParser.ts
index 0134a687..da924f0d 100644
--- a/src/parse/OpenedgeProjectParser.ts
+++ b/src/parse/OpenedgeProjectParser.ts
@@ -474,9 +474,7 @@ function getWorkspaceProfileConfig (workspaceUri: Uri, openedgeProjectProfile?:
if (prf.propath.length == 0)
prf.propath = prjConfig.propath
for (const e of prf.buildPath) {
- if (!e.buildDir) {
- e.buildDir = e.path
- }
+ e.buildDir = e.buildDir ?? e.path
}
return prf
}
diff --git a/src/parse/ProfileParser.ts b/src/parse/ProfileParser.ts
index 9717d694..873499d9 100644
--- a/src/parse/ProfileParser.ts
+++ b/src/parse/ProfileParser.ts
@@ -1,4 +1,4 @@
-import { Uri, workspace } from 'vscode'
+import { DeclarationCoverage, Position, Range, Uri, workspace } from 'vscode'
import { PropathParser } from '../ABLPropath'
import { ABLDebugLines } from '../ABLDebugLines'
import { log } from '../ChannelLogger'
@@ -38,7 +38,7 @@ export class ABLProfile {
}
log.debug('section1 ' + sectionLines[1].length)
- this.profJSON = new ABLProfileJson(sectionLines[1], debugLines)
+ this.profJSON = new ABLProfileJson(uri, sectionLines[1], debugLines)
log.debug('section2 ' + sectionLines[2].length)
await this.profJSON.addModules(sectionLines[2])
log.debug('section3 ' + sectionLines[3].length)
@@ -73,7 +73,7 @@ export class ABLProfile {
}
this.profJSON.modules.sort((a, b) => a.ModuleID - b.ModuleID)
- log.debug('parsing profiler data complete')
+ log.debug('parsing profiler data complete (modules.length=' + this.profJSON.modules.length + ')')
if (writeJson) {
const jsonUri = Uri.file(uri.fsPath.replace(/\.[a-zA-Z]+$/, '.json'))
// eslint-disable-next-line promise/catch-or-return
@@ -106,6 +106,7 @@ export class ABLProfile {
const summaryRE = /^(\d+) (\d{2}\/\d{2}\/\d{4}) "([^"]*)" (\d{2}:\d{2}:\d{2}) "([^"]*)" (.*)$/
const moduleRE = /^(\d+) "([^"]*)" "([^"]*)" (\d+) (\d+) "([^"]*)"$/
+const moduleRE2 = /^(\d+) "([^"]*)" "([^"]*)" (\d+)$/
// CALL TREE: CallerID CallerLineno CalleeID CallCount
const callTreeRE = /^(\d+) (-?\d+) (\d+) (\d+)$/
// LINE SUMMARY: ModuleID LineNo ExecCount ActualTime CumulativeTime
@@ -154,7 +155,7 @@ interface ITrace { // Section 5
export interface ILineSummary { // Section 4
LineNo: number
- ExecCount?: number
+ ExecCount: number
ActualTime?: number
CumulativeTime?: number
Executable: boolean
@@ -166,7 +167,7 @@ export interface ILineSummary { // Section 4
}
class LineSummary {
- ExecCount?: number
+ ExecCount: number
ActualTime?: number
CumulativeTime?: number
trace?: ITrace[]
@@ -175,7 +176,9 @@ class LineSummary {
incLine?: number
incUri?: Uri
- constructor (public readonly LineNo: number, public readonly Executable: boolean) {}
+ constructor (public readonly LineNo: number, public readonly Executable: boolean) {
+ this.ExecCount = 0
+ }
get incPath () {
if (this.incUri) {
@@ -275,12 +278,13 @@ export class ABLProfileJson {
// StmtCnt: string | undefined
modules: IModule[] = []
userData: IUserData[] = []
- debugLines: ABLDebugLines
+ testItemId?: string
+ interpretedModuleSequence = 0
- constructor (lines: string[], debugLines: ABLDebugLines) {
+ constructor (public readonly profileUri: Uri, lines: string[], public debugLines: ABLDebugLines) {
this.debugLines = debugLines
if (lines.length > 1) {
- throw new Error('Invalid profile data - section 1 should have exactly one line')
+ throw new Error('Invalid profile data - section 1 should have exactly one line (uri=' + this.profileUri.fsPath + ')')
}
const test = summaryRE.exec(lines[0])
if(test) {
@@ -291,36 +295,37 @@ export class ABLProfileJson {
this.userID = test[5]
this.properties = JSON.parse(test[6].replace(/\\/g, '/')) as IProps
} else {
- throw new Error('Unable to parse profile data in section 1')
+ throw new Error('Unable to parse profile data in section 1 (uri=' + this.profileUri.fsPath + ')')
}
}
- private excludeSourceName (name?: string) {
- return !name ||
- name.startsWith('OpenEdge.') ||
- name.includes('VSCodeTestRunner/OpenEdge/')
- }
-
async addModules (lines: string[]) {
this.modules = []
const childModules: IModule[] = []
for(const element of lines) {
- const test = moduleRE.exec(element)
+ let test = moduleRE.exec(element)
+ if (!test) {
+ test = moduleRE2.exec(element)
+ }
const moduleName = test![2]
- let entityName: string | undefined = undefined
+ if (!moduleName) {
+ throw new Error('Unable to parse module name - name is empty (uri=' + this.profileUri.fsPath + ')')
+ }
+
let sourceName = ''
let parentName: string | undefined
const destructor: boolean = moduleName.startsWith('~')
const split = moduleName.split(' ')
if (split.length >= 4) {
- throw new Error('Unable to parse module name - has 4 sections which is more than expected: ' + moduleName)
+ throw new Error('Unable to parse module name - has 4 sections which is more than expected: ' + moduleName + ' (uri=' + this.profileUri.fsPath + ')')
}
- entityName = split[0]
+ let entityName = split[0]
if (split.length == 1) {
sourceName = split[0]
+ entityName = ''
} else {
if (split[1]) {
sourceName = split[1]
@@ -328,6 +333,9 @@ export class ABLProfileJson {
if (split[2]) {
parentName = split[2]
}
+ if (split[3]) {
+ log.warn('module has fourth section: ' + split[3] + ' (module.name=' + sourceName + ', uri=' + this.profileUri.fsPath + ')')
+ }
}
const fileinfo = await this.debugLines.propath.search(sourceName)
@@ -357,29 +365,53 @@ export class ABLProfileJson {
ISectionTwelve: []
}
-
if (Number(test![4]) != 0) {
- this.modules[this.modules.length] = mod
+ this.modules.push(mod)
} else {
- childModules[childModules.length] = mod
+ childModules.push(mod)
}
}
this.addChildModulesToParents(childModules)
}
addChildModulesToParents (childModules: IModule[]) {
- childModules.forEach(child => {
- const parent = this.modules.find(p => p.SourceName === child.SourceName)
-
- if(parent) {
- parent.childModules[parent.childModules.length] = child
- if (parent.SourceName === child.SourceName) {
- parent.SourceName = child.SourceName
+ for(const child of childModules) {
+ if (checkSkipList(child.SourceName)) {
+ continue
+ }
+ let parent = this.modules.find(p => p.SourceUri === child.SourceUri)
+ if (!parent) {
+ parent = this.modules.find(p => p.SourceName === child.ParentName)
+ }
+ if (!parent) {
+ this.interpretedModuleSequence--
+ log.warn('Could not find parent module, creating interpreted module id ' + this.interpretedModuleSequence + ' for ' + child.SourceName + ' (uri=' + this.profileUri.fsPath + ')')
+ parent = {
+ ModuleID: this.interpretedModuleSequence,
+ ModuleName: child.SourceName,
+ EntityName: child.SourceName,
+ SourceName: child.SourceName,
+ SourceUri: child.SourceUri,
+ CrcValue: 0,
+ ModuleLineNum: 0,
+ UnknownString1: '',
+ executableLines: 0,
+ executedLines: 0,
+ coveragePct: 0,
+ lineCount: 0,
+ calledBy: [],
+ calledTo: [],
+ childModules: [],
+ lines: []
}
- } else {
- throw new Error('Unable to find parent module for ' + child.SourceName + ' ' + child.ModuleName)
+ this.modules.push(parent)
}
- })
+
+ parent.childModules.push(child)
+ if (parent.SourceName === child.SourceName) {
+ parent.SourceName = child.SourceName
+ }
+ }
}
getModule (modID: number): IModule | undefined {
@@ -458,35 +490,33 @@ export class ABLProfileJson {
const modID = Number(test[1])
const sourceName = this.getModule(modID)?.SourceName
- if (this.excludeSourceName(sourceName)) continue
+ if (checkSkipList(sourceName)) {
+ continue
+ }
+
const sum = new LineSummary(Number(test[2]), true)
sum.ExecCount = Number(test[3])
sum.ActualTime = Number(test[4])
sum.CumulativeTime = Number(test[5])
if (!sourceName) {
- if (modID !== 0) {
- log.debug('could not find source name for module ' + modID)
- }
+ log.warn('could not find source name for module ' + modID)
continue
}
const lineinfo = await this.debugLines.getSourceLine(sourceName, sum.LineNo)
- if(!lineinfo) {
- if (sourceName !== 'ABLUnitCore.p') {
- log.debug('could not find source/debug line info for ' + sourceName + ' ' + sum.LineNo)
- }
- // throw new Error("Unable to find source/debug line info for " + sourceName + " " + sum.LineNo)
- } else {
+ if(lineinfo) {
sum.srcLine = lineinfo.debugLine
sum.srcUri = lineinfo.debugUri
sum.incLine = lineinfo.sourceLine
sum.incUri = lineinfo.sourceUri
+ } else {
+ log.debug('could not find source/debug line info for ' + sourceName + ' ' + sum.LineNo)
}
const mod = this.getModule(modID)
if (mod) {
- mod.lines[mod.lines.length] = sum
+ mod.lines.push(sum)
if (sum.LineNo != 0) {
mod.lineCount++
}
@@ -534,26 +564,36 @@ export class ABLProfileJson {
if (lines[lineNo] === '.') {
// set info for the previous section
if (mod) {
+ mod.executableLines = mod.lines.filter(l => l.LineNo != 0 && l.Executable).length
+ mod.executedLines = mod.lines.filter(l => l.LineNo != 0 && l.ExecCount > 0).length
if (mod.executableLines > 0) {
- mod.coveragePct = mod.executedLines / mod.executableLines * 100
+ mod.coveragePct = mod.executedLines * 100 / mod.executableLines
}
}
continue
}
-
if (lines[lineNo - 1] === '.') {
// prepare the next section by finding the correct module
mod = await this.addCoverageNextSection(lines[lineNo])
+ if (!mod) {
+ log.warn('addCoverageNextSection returned undefined (lineNo=' + lineNo + ', uri=' + this.profileUri.fsPath + ')' +
+ '\tlines[' + lineNo + ']=' + lines[lineNo])
+ }
continue
}
- if(!mod) { throw new Error('invalid data in section 6') }
+ if(!mod) {
+ log.warn('no module found for coverage data in section 6 (uri=' + this.profileUri.fsPath + ')')
+ return
+ // log.error('invalid data in section 6 (uri=' + this.profileUri.fsPath + ')')
+ // throw new Error('invalid data in section 6 (uri=' + this.profileUri.fsPath + ')')
+ }
// add exec count to existing line
const line = this.getLine(mod, Number(lines[lineNo]))
if (line) {
line.Executable = true
- mod.executedLines++
+ mod.executableLines++
continue
}
@@ -579,31 +619,32 @@ export class ABLProfileJson {
mod.lines.push(sum)
}
} catch (error) {
- log.error('Error parsing coverage data in section 6 [module=' + mod?.ModuleName + ']: error=' + error)
+ log.error('Error parsing coverage data in section 6 (module=' + mod?.ModuleName + ', uri=' + this.profileUri.fsPath + '):\n\terror=' + error)
}
this.assignParentCoverage()
}
async addCoverageNextSection (line: string) {
const test = coverageRE.exec(line)
- let mod: IModule | undefined
if (!test) {
- throw new Error('Unable to parse coverage data in section 6')
+ throw new Error('Unable to parse coverage data in section 6 (uri=' + this.profileUri.fsPath + ')')
+ }
+ if (checkSkipList(test[2])) {
+ return
}
if (test[2] != '') {
- mod = this.getChildModule(Number(test[1]), test[2])
+ const mod = this.getChildModule(Number(test[1]), test[2])
if (mod) {
mod.executableLines = Number(test[3])
+ return mod
}
}
- if (mod) {
- return mod
- }
- mod = this.getModule(Number(test[1]))
+ const mod = this.getModule(Number(test[1])) ?? this.modules.find(mod => mod.SourceName == test[2])
if (!mod) {
- throw new Error('Unable to find module ' + test[1] + ' ' + test[2] + ' in section 6')
+ log.warn('Unable to find module ' + test[1] + ' ' + test[2] + ' in section 6 (' + this.profileUri.fsPath + ')')
+ return
}
mod.executableLines += Number(test[3])
@@ -639,26 +680,46 @@ export class ABLProfileJson {
}
assignParentCoverage () {
- this.modules.forEach(parent => {
- parent.childModules.forEach(child => {
+ for (const parent of this.modules) {
+ if (checkSkipList(parent.SourceName)) {
+ continue
+ }
+ for (const child of parent.childModules) {
+ child.executableLines = child.lines.filter(l => l.LineNo > 0 && l.Executable).length
+ child.executedLines = child.lines.filter(l => l.LineNo > 0 && l.ExecCount > 0).length
parent.executableLines += child.executableLines
parent.executedLines += child.executedLines
- child.lines.forEach(line => {
+ child.lines.sort((a, b) => a.LineNo - b.LineNo)
+ for (const line of child.lines) {
+ if (line.LineNo == 0) {
+ continue
+ }
const parentLine = parent.lines.find(l => l.LineNo == line.LineNo)
+ const idx = parent.lines.findIndex(l => l.LineNo == line.LineNo)
if(parentLine) {
- parentLine.ExecCount = line.ExecCount
- parentLine.ActualTime = line.ActualTime
- parentLine.CumulativeTime = line.CumulativeTime
+ parentLine.ExecCount += line.ExecCount
+ if (line.ActualTime) {
+ if (!parentLine.ActualTime) parentLine.ActualTime = 0
+ parentLine.ActualTime += line.ActualTime
+ }
+ if (line.CumulativeTime) {
+ if (!parentLine.CumulativeTime) parentLine.CumulativeTime = 0
+ parentLine.CumulativeTime += line.CumulativeTime
+ }
+ parent.lines[idx] = parentLine
} else {
- parent.lines[parent.lines.length] = line
+ parent.lines.push(line)
}
- })
- child.lines.sort((a, b) => a.LineNo - b.LineNo)
- })
+ }
+ }
+ parent.childModules.sort((a, b) => a.ModuleID - b.ModuleID)
parent.coveragePct = parent.executedLines / parent.executableLines * 100
parent.lines.sort((a, b) => a.LineNo - b.LineNo)
- parent.childModules.sort((a, b) => a.ModuleID - b.ModuleID)
- })
+
+ if (parent.lines.length > 0) {
+ parent.lineCount = parent.lines[parent.lines.length - 1]?.LineNo ?? 0 // not totally accurate, but close
+ }
+ }
}
addSection7 (lines: string[]) {
@@ -722,7 +783,7 @@ export class ABLProfileJson {
if (!mod.ISectionNine) mod.ISectionNine = []
mod.ISectionNine.push(ISectionNine)
} else {
- log.error('Unable to find module ' + ISectionNine.ModuleID + ' in section 9')
+ log.error('Unable to find module ' + ISectionNine.ModuleID + ' in section 9 (uri=' + this.profileUri.fsPath + ')')
log.error(' - line=\'' + element + '\'')
}
}
@@ -744,7 +805,7 @@ export class ABLProfileJson {
if (!mod.ISectionTen) mod.ISectionTen = []
mod.ISectionTen.push(ISectionTen)
} else {
- log.error('Unable to find module ' + ISectionTen.ModuleID + ' in section 10')
+ log.error('Unable to find module ' + ISectionTen.ModuleID + ' in section 10 (uri=' + this.profileUri.fsPath + ')')
log.error(' - line=\'' + element + '\'')
}
}
@@ -779,7 +840,7 @@ export class ABLProfileJson {
} else {
// TODO
if (ISectionTwelve.ModuleID != 0) {
- log.error('Unable to find module " + ISectionTwelve.ModuleID + " in section 12 (line=' + element + ')')
+ log.debug('Unable to find module ' + ISectionTwelve.ModuleID + ' in section 12 (line=' + element + ', uri=' + this.profileUri.fsPath + ')')
}
}
}
@@ -800,8 +861,52 @@ export class ABLProfileJson {
data: test[2]
})
} else {
- throw new Error('Unable to parse user data')
+ throw new Error('Unable to parse user data (uri=' + this.profileUri.fsPath + ')')
}
}
}
}
+
+
+export function getModuleRange (module: IModule) {
+ const lines = module.lines.filter((a) => a.LineNo > 0)
+ for (const child of module.childModules) {
+ lines.push(...child.lines.filter((l) => l.LineNo > 0))
+ }
+ lines.sort((a, b) => { return a.LineNo - b.LineNo })
+
+ if (lines.length == 0) {
+ return undefined
+ }
+
+ const start = new Position(lines[0].LineNo - 1, 0)
+ const end = new Position(lines[lines.length - 1].LineNo - 1, 0)
+ return new Range(start, end)
+}
+
+export function getDeclarationCoverage (module: IModule) {
+ const fdc: DeclarationCoverage[] = []
+
+ const range = getModuleRange(module)
+ if (range) {
+ const zeroLine = module.lines.find((a) => a.LineNo == 0)
+ fdc.push(new DeclarationCoverage(module.EntityName ?? '', zeroLine?.ExecCount ?? 0, range))
+ }
+ for (const child of module.childModules) {
+ const childRange = getModuleRange(child)
+ if (childRange) {
+ const zeroLine = child.lines.find((a) => a.LineNo == 0)
+ fdc.push(new DeclarationCoverage(child.EntityName ?? '', zeroLine?.ExecCount ?? 0, childRange))
+ }
+ }
+ return fdc
+}
+
+export function checkSkipList (sourceName: string | undefined) {
+ return sourceName == undefined ||
+ sourceName.startsWith('OpenEdge.') ||
+ sourceName.endsWith('ABLUnitCore.p') ||
+ sourceName == 'Ccs.Common.Application' ||
+ sourceName == 'VSCode.ABLUnit.Runner.ABLRunner' ||
+ sourceName == 'VSCodeWriteProfiler.p'
+}
diff --git a/src/parse/SourceMapRCodeParser.ts b/src/parse/SourceMapRCodeParser.ts
index e9199c87..1b4345bc 100644
--- a/src/parse/SourceMapRCodeParser.ts
+++ b/src/parse/SourceMapRCodeParser.ts
@@ -214,7 +214,6 @@ export const getSourceMapFromRCode = (propath: PropathParser, uri: Uri) => {
throw new Error('could not find source name for num=' + num + ', uri="' + uri.fsPath + '"')
}
-
const parseSources = async (bytes: Uint32Array, pos: number, prefix = '') => {
const end = nextDelim(bytes, pos + 4, 1, prefix)
const childBytes = bytes.subarray(pos/4, end)
@@ -249,7 +248,6 @@ export const getSourceMapFromRCode = (propath: PropathParser, uri: Uri) => {
const end = pos/4 + 4
const childBytes = bytes.subarray(pos/4, end)
-
let sourceUri
try {
sourceUri = getSourceUri(childBytes[3])
@@ -307,7 +305,6 @@ export const getSourceMapFromRCode = (propath: PropathParser, uri: Uri) => {
const buildDebugLines = () => {
const debugUri = getSourceUri(0)
- const debugName = getSourceName(0)
if (map.length === 0) {
for (const proc of procs) {
for (const line of proc.lines ?? []) {
@@ -372,7 +369,6 @@ export const getSourceMapFromRCode = (propath: PropathParser, uri: Uri) => {
return debugLines
}
-
return workspace.fs.readFile(uri).then(async (raw) => {
const headerInfo = parseHeader(raw.subarray(0, 68))
const segmentInfo = parseSegmentTable(raw.subarray(headerInfo.segmentTableLoc, headerInfo.segmentTableLoc + headerInfo.segmentTableSize))
diff --git a/src/parse/SourceMapXrefParser.ts b/src/parse/SourceMapXrefParser.ts
index 757f0ae2..7d2f94b8 100644
--- a/src/parse/SourceMapXrefParser.ts
+++ b/src/parse/SourceMapXrefParser.ts
@@ -15,11 +15,10 @@ interface IIncLength {
lineCount: number
}
-export const getSourceMapFromXref = (propath: PropathParser, debugSourceName: string) => {
+export const getSourceMapFromXref = (propath: PropathParser, debugSource: string) => {
const map: SourceMap[] = []
const incLengths: IIncLength[] = []
const includes: IXrefInclude[] = []
- const warnings: string[] = []
let lineCount = 0
const readIncludeLineCount = (uri: Uri) => {
@@ -104,12 +103,15 @@ export const getSourceMapFromXref = (propath: PropathParser, debugSourceName: st
}
const importDebugLines = async (sourcePath: string, debugSourceUri: Uri, xrefUri: Uri) => {
- const m: SourceMap = {
- path: sourcePath,
- sourceUri: debugSourceUri,
- items: [],
+ let m: SourceMap | undefined = map.find((i) => i.sourceUri.fsPath == debugSourceUri.fsPath)
+ if (!m) {
+ m = {
+ path: sourcePath,
+ sourceUri: debugSourceUri,
+ items: [],
+ }
+ map.push(m)
}
- map.push(m)
// This reads the xref to find where the include files belong, and finds how many lines each of those includes contain
// It is is prone to error, especially in cases of multiple line arguments or include declarations.
@@ -141,42 +143,17 @@ export const getSourceMapFromXref = (propath: PropathParser, debugSourceName: st
return m
}
- const getSourceMap = async (debugSourceName: string) => {
- log.info('debugSourceName=' + debugSourceName)
+ const getSourceMap = async (debugSource: string) => {
// check for previously parsed source map
- let debugLines = map.filter((dlm) => dlm.path === debugSourceName)
- if (debugLines && debugLines.length > 0) {
- log.info('debugLines found! debugLines.length=' + debugLines.length)
- if (debugLines.length > 1) {
- log.error('more than one source map found for ' + debugSourceName)
- throw new Error('more than one source map found for ' + debugSourceName)
- }
- return debugLines[0]
- }
-
- // find the source file in the propath
- log.info('searching for ' + debugSourceName)
- const fileinfo = await propath.search(debugSourceName)
+ const fileinfo = await propath.search(debugSource)
if (!fileinfo) {
- if (!debugSourceName.startsWith('OpenEdge.') && debugSourceName != 'ABLUnitCore.p') {
- if (!warnings.includes(debugSourceName)) {
- log.error('[getSourceMap] WARNING: cannot find ' + debugSourceName + ' in propath.')
- warnings.push(debugSourceName)
- }
- }
- return undefined
+ throw new Error('cannot find file in propath: ' + debugSource)
}
// import the source map and return it
- try {
- debugLines = [await importDebugLines(debugSourceName, fileinfo.uri, fileinfo.xrefUri)]
- } catch (e: unknown) {
- log.warn('cannot find source map for ' + debugSourceName + ' (e=' + e + ')')
- return undefined
- }
- log.info('returning sourcemap for ' + debugSourceName)
- return debugLines[0]
+ const debugLines = await importDebugLines(debugSource, fileinfo.uri, fileinfo.xrefUri)
+ return debugLines
}
- return getSourceMap(debugSourceName)
+ return getSourceMap(debugSource)
}
diff --git a/src/parse/TestProfileParser.ts b/src/parse/TestProfileParser.ts
index 0811834b..2dbbbb2c 100644
--- a/src/parse/TestProfileParser.ts
+++ b/src/parse/TestProfileParser.ts
@@ -80,12 +80,22 @@ export function parseRunProfiles (workspaceFolders: WorkspaceFolder[], wsFilenam
try {
wfConfig = getConfigurations(Uri.joinPath(workspaceFolder.uri, '.vscode', wsFilename))
} catch (e: unknown) {
- if (e instanceof FileSystemError && e.code === 'ENOENT') {
- log.warn('no .vscode/' + wsFilename + ' file found. using default profile')
- return defaultConfig.configurations
+ if (e instanceof FileSystemError) {
+ if (e.name === 'FileNotFound') {
+ log.info('no .vscode/' + wsFilename + ' file found. using default profile (e=' + e.name + ')')
+ } else {
+ log.notificationWarning('Failed to import .vscode/ablunit-test-profile.json. Attempting to use default profile...\n[' + e.code + ']: ' + e.message)
+ }
+ } else if (e instanceof Error) {
+ // @ts-expect-error ThisIsSafeForTesting
+ if (e.code == 'ENOENT') {
+ log.info('no .vscode/' + wsFilename + ' file found. using default profile (e=' + e + ')')
+ } else {
+ log.notificationWarning('Failed to import .vscode/ablunit-test-profile.json! Attempting to use default profile...\n(e=' + e + ')')
+ }
+ } else {
+ log.notificationError('Failed to import .vscode/ablunit-test-profile.json! Attempting to use default profile...\n(e=' + e + ')')
}
- log.notificationWarning('Could not import .vscode/ablunit-test-profile.json. Attempting to use default profile...')
- log.warn('e=' + e)
return defaultConfig.configurations
}
if (wfConfig.configurations.length === 0) {
@@ -184,8 +194,15 @@ export class RunConfig extends DefaultRunProfile {
this.options = new CoreOptions(this.profile.options)
const tmpFilename = (this.profile.options?.output?.filename?.replace(/\.xml$/, '') ?? 'results') + '.xml'
+ if (this.options?.output?.location) {
+ this.options.output.location = this.options.output.location.replace(/\\\\/g, '/')
+ if (!this.options.output.location.endsWith('/')) {
+ // add a trailing slash so this.getUri treats this location as a directory
+ this.options.output.location += '/'
+ }
+ }
this.optionsUri = {
- locationUri: this.getUri(this.profile.options?.output?.location + '/'),
+ locationUri: this.getUri(this.options?.output?.location),
filenameUri: Uri.joinPath(this.tempDirUri, tmpFilename),
updateUri: Uri.joinPath(this.tempDirUri, 'updates.log'),
}
diff --git a/src/parse/config/CoreOptions.ts b/src/parse/config/CoreOptions.ts
index 442ce03e..e6f4256c 100644
--- a/src/parse/config/CoreOptions.ts
+++ b/src/parse/config/CoreOptions.ts
@@ -34,11 +34,11 @@ export interface ICoreOptions {
}
export interface IABLUnitJson {
+ $comment?: string
options: ICoreOptions
tests: ITestObj[]
}
-
export class CoreOptions implements ICoreOptions {
output: ICoreOutput = {
location: '${tempDir}',
diff --git a/test/createTestConfig.mjs b/test/createTestConfig.mjs
index 95c31897..5bb38007 100644
--- a/test/createTestConfig.mjs
+++ b/test/createTestConfig.mjs
@@ -12,6 +12,8 @@ import * as fs from 'fs'
import process from 'process'
const __dirname = path.dirname(fileURLToPath(import.meta.url))
+const DLC = undefined
+// const DLC = 'C:/Progress/OpenEdge'
const vsVersionNum = '1.88.0'
const vsVersion = process.env['ABLUNIT_TEST_RUNNER_VSCODE_VERSION'] ?? 'stable'
const useOEAblPrerelease = false
@@ -50,6 +52,8 @@ function getMochaTimeout (projName) {
switch (projName) {
case 'DebugLines': return 120000 // install openedge-abl-lsp for the first time, so give it a moment to start
+ // case 'proj0': return 30000
+ case 'proj0': return 45000
case 'proj1': return 30000
// case 'proj2': return 20000
case 'proj5': return 60000
@@ -101,7 +105,7 @@ function getMochaOpts (projName) {
}
}
- if (process.env['CIRCLECI']) {
+ if (process.env['CIRCLECI'] == 'true') {
mochaOpts.bail = false
}
@@ -123,6 +127,7 @@ function getLaunchArgs (projName) {
// args.push('--wait')
// args.push('--locale ')
// args.push('--user-data-dir', '')
+ // args.push('--user-data-dir', '.vscode-test/user-data_' + projName)
// args.push('--profile ')
// args.oush('--profile=ablunit-test')
// args.push('--profile-temp') // create a temporary profile for the test run in lieu of cleaning up user data
@@ -239,6 +244,10 @@ function getTestConfig (testDir, projName) {
VSCODE_SKIP_PRELAUNCH: true,
}
+ if (DLC) {
+ env.DLC = DLC
+ }
+
/** @type {import('@vscode/test-cli').IDesktopTestConfiguration} */
const testConfig = {
// -- IDesktopPlatform -- //
@@ -303,13 +312,14 @@ function getCoverageOpts () {
// * 'lcovonly' does not include 'html' output
reporter: [ 'text', 'lcovonly' ],
output: coverageDir, // https://github.com/microsoft/vscode-test-cli/issues/38
- include: [
- '**'
- ],
exclude: [
'node_modules',
+ 'node_modules/',
'node_modules/**',
- '**/node_modules/**'
+ './node_modules',
+ './node_modules/',
+ './node_modules/**',
+ '**/node_modules/**',
],
}
return coverageOpts
diff --git a/test/parse/SourceMapXrefParser.test.ts b/test/parse/SourceMapXrefParser.test.ts
index 44da6174..a8fc0964 100644
--- a/test/parse/SourceMapXrefParser.test.ts
+++ b/test/parse/SourceMapXrefParser.test.ts
@@ -37,7 +37,7 @@ test('SourceMapXrefParser.test_1', () => {
return getSourceMap(propath, testuri).then((sourceMap) => {
for (const item of sourceMap.items) {
- log.info('item=' + item.debugLine + ',' + item.sourceLine)
+ log.info('item=' + JSON.stringify(item, null, 2))
}
assert.equal(sourceMap.items.length, getLineCount(toUri('.dbg/test_1/test.p')))
diff --git a/test/suites/proj0.test.ts b/test/suites/proj0.test.ts
index ad85f032..3a92f8e4 100644
--- a/test/suites/proj0.test.ts
+++ b/test/suites/proj0.test.ts
@@ -1,7 +1,7 @@
import { Uri, commands, window, workspace } from 'vscode'
import { assert, getRcodeCount, getResults, getTestControllerItemCount, getTestItem, getXrefCount, log, rebuildAblProject, refreshTests, runAllTests, runAllTestsWithCoverage, runTestAtLine, runTestsDuration, runTestsInFile, sleep2, suiteSetupCommon, toUri, updateConfig, updateTestProfile } from '../testCommon'
import { ABLResultsParser } from 'parse/ResultsParser'
-import { TimeoutError } from 'ABLUnitRun'
+import { TimeoutError } from 'Errors'
import * as vscode from 'vscode'
import * as FileUtils from '../../src/FileUtils'
@@ -16,22 +16,26 @@ suite('proj0 - Extension Test Suite', () => {
const disposables: vscode.Disposable[] = []
suiteSetup('proj0 - before', async () => {
- FileUtils.deleteFile(toUri('.vscode/ablunit-test-profile.json'))
- FileUtils.deleteFile(toUri('src/dirA/proj10.p'))
- FileUtils.deleteFile(toUri('UNIT_TEST.tmp'))
+ FileUtils.copyFile(toUri('.vscode/settings.json'), toUri('.vscode/settings.json.bk'), { force: true })
+
+ FileUtils.deleteFile([
+ toUri('.vscode/ablunit-test-profile.json'),
+ toUri('src/dirA/proj10.p'),
+ toUri('UNIT_TEST.tmp'),
+ ], { force: true })
+
await suiteSetupCommon()
await commands.executeCommand('testing.clearTestResults')
- FileUtils.copyFile(
- toUri('.vscode/settings.json'),
- toUri('.vscode/settings.json.bk'),
- { force: true })
return
})
teardown('proj0 - afterEach', () => {
- FileUtils.deleteFile(toUri('.vscode/ablunit-test-profile.json'))
- FileUtils.deleteFile(toUri('src/dirA/proj10.p'))
- FileUtils.deleteFile(toUri('UNIT_TEST.tmp'))
+ log.info('proj0 teardown')
+ FileUtils.deleteFile([
+ toUri('.vscode/ablunit-test-profile.json'),
+ toUri('src/dirA/proj10.p'),
+ toUri('UNIT_TEST.tmp'),
+ ], { force: true })
while (disposables.length > 0) {
const d = disposables.pop()
if (d) {
@@ -40,11 +44,11 @@ suite('proj0 - Extension Test Suite', () => {
log.warn('disposables.length != 0')
}
}
- if (FileUtils.doesFileExist(toUri('.vscode/settings.json.bk'))) {
- FileUtils.deleteFile(toUri('.vscode/settings.json'))
- FileUtils.copyFile(toUri('.vscode/settings.json.bk'), toUri('.vscode/settings.json'), { force: true })
- }
- log.info('proj0 teardown/afterEach --- end')
+ return
+ })
+
+ suiteTeardown('proj0 - after', () => {
+ FileUtils.renameFile(toUri('.vscode/settings.json.bk'), toUri('.vscode/settings.json'))
})
test('proj0.01 - ${workspaceFolder}/ablunit.json file exists', () => {
@@ -88,7 +92,7 @@ suite('proj0 - Extension Test Suite', () => {
await window.showTextDocument(testFileUri)
await runAllTestsWithCoverage()
- const lines = (await getResults())[0].coverage.get(testFileUri.fsPath) ?? []
+ const lines = (await getResults())[0].statementCoverage.get(testFileUri.fsPath) ?? []
assert.assert(lines, 'no coverage found for ' + workspace.asRelativePath(testFileUri))
assert.linesExecuted(testFileUri, [5, 6])
})
@@ -99,7 +103,7 @@ suite('proj0 - Extension Test Suite', () => {
await window.showTextDocument(testFileUri)
await runAllTests()
- const lines = (await getResults())[0].coverage.get(testFileUri.fsPath) ?? []
+ const lines = (await getResults())[0].statementCoverage.get(testFileUri.fsPath) ?? []
if (lines && lines.length > 0) {
assert.fail('coverage should be empty for ' + workspace.asRelativePath(testFileUri) + ' (lines.length=' + lines.length + ')')
}
@@ -133,7 +137,7 @@ suite('proj0 - Extension Test Suite', () => {
test('proj0.07 - parse test class with skip annotation', async () => {
await commands.executeCommand('vscode.open', toUri('src/ignoreMethod.cls'))
- await sleep2(250)
+ await sleep2(100)
const testClassItem = await getTestItem(toUri('src/ignoreMethod.cls'))
if (!testClassItem) {
@@ -165,8 +169,8 @@ suite('proj0 - Extension Test Suite', () => {
return true
}, (e: unknown) => {
if (e instanceof Error) {
- log.info('e.message=' + e.message)
- log.info('e.stack=' + e.stack)
+ log.error('e.message=' + e.message)
+ log.error('e.stack=' + e.stack)
}
assert.fail('error parsing results_test1.xml: ' + e)
})
@@ -214,13 +218,7 @@ suite('proj0 - Extension Test Suite', () => {
}, (e: unknown) => { throw e })
const startCount = await getTestItem(toUri('src/dirA/proj10.p'))
- .then((r) => {
- for (const [ ,c] of r.children) {
- log.info('c.label=' + c.label + '; c.id=' + c.id)
- }
- return r.children.size
- }, (e: unknown) => { throw e })
-
+ .then((r) => r.children.size)
// update test program
const edit = new vscode.WorkspaceEdit()
@@ -231,12 +229,7 @@ suite('proj0 - Extension Test Suite', () => {
// validate test case items added
await sleep2(250) // TODO - remove me
const endCount = await getTestItem(toUri('src/dirA/proj10.p'))
- .then((r) => {
- for (const [ ,c] of r.children) {
- log.info('c.label=' + c.label + '; c.id=' + c.id)
- }
- return r.children.size
- }, (e: unknown) => { throw e })
+ .then((r) => r.children.size)
assert.equal(endCount - startCount, 2, 'test cases added != 2 (endCount=' + endCount + '; startCount=' + startCount + ')')
})
@@ -263,35 +256,41 @@ suite('proj0 - Extension Test Suite', () => {
})
test('proj0.11 - timeout 5s', () => {
- return updateConfig('ablunit.files.exclude', '**/.{builder,pct}/**')
- .then((r) => { return updateTestProfile('timeout', 5000) })
- .then((r) => { return sleep2(250) })
- .then((r) => { return runTestsInFile('src/timeout.p', 0) })
- .then(() => { return commands.executeCommand('_ablunit.getTestRunError') })
- .then((e) => {
- assert.tests.timeout(e)
- return
+ const prom = updateConfig('ablunit.files.exclude', '**/.{builder,pct}/**')
+ .then(() => { return updateTestProfile('timeout', 5000) })
+ .then(() => { return sleep2(250) })
+ .then(() => { return runTestsInFile('src/timeout.p', 0) })
+ .then(() => {
+ return assert.fail('expected TimeoutError to be thrown')
+ }, (e: unknown) => {
+ log.info('e=' + e)
+ return assert.tests.timeout(e)
})
+ return prom
})
test('proj0.12 - timeout 1500ms fail', () => {
- return updateConfig('ablunit.files.exclude', '**/.{builder,pct}/**')
+ const prom = updateConfig('ablunit.files.exclude', '**/.{builder,pct}/**')
.then(() => { return updateTestProfile('timeout', 1500) })
.then(() => { return runTestAtLine('src/timeout.p', 37, 0) })
.then(() => { return commands.executeCommand('_ablunit.getTestRunError') })
- .then((e) => {
+ .then(() => {
+ return assert.fail('expected TimeoutError to be thrown')
+ }, (e: unknown) => {
+ log.info('e=' + e)
assert.tests.timeout(e)
const t: TimeoutError = e as TimeoutError
assert.durationMoreThan(t.duration, 1500)
assert.durationLessThan(t.duration, 2000)
return
})
+ return prom
})
test('proj0.13 - timeout 2500ms pass', () => {
- return updateTestProfile('timeout', 2500)
+ const prom = updateTestProfile('timeout', 2500)
.then(() => { return updateConfig('ablunit.files.exclude', '**/.{builder,pct}/**') })
- .then(() => { return sleep2(500)})
+ .then(() => { return sleep2(100) })
.then(() => { return runTestAtLine('src/timeout.p', 37, 0) })
.then(() => { return commands.executeCommand('_ablunit.getTestRunError') })
.then((e) => {
@@ -300,16 +299,19 @@ suite('proj0 - Extension Test Suite', () => {
assert.fail('expected no error to be thrown but got e=' + JSON.stringify(e, null, 2))
}
assert.durationMoreThan(runTestsDuration, 2000)
- assert.durationLessThan(runTestsDuration, 3000)
+ assert.durationLessThan(runTestsDuration, 3250)
return
})
+ return prom
})
test('proj0.14 - timeout invalid -5s', () => {
- return updateTestProfile('timeout', -5000)
+ const prom = updateTestProfile('timeout', -5000)
.then(() => { return runTestsInFile('src/simpleTest.p', 0) })
.then(() => { return commands.executeCommand('_ablunit.getTestRunError') })
.then((e) => {
+ return assert.fail('expected RangeError to be thrown but got e=' + JSON.stringify(e, null, 2))
+ }, (e: unknown) => {
if (e instanceof Error) {
log.info('e=' + JSON.stringify(e))
assert.equal(e.name, 'RangeError', 'expecting RangeError due to negative timeout value. e=' + JSON.stringify(e, null, 2))
@@ -318,6 +320,77 @@ suite('proj0 - Extension Test Suite', () => {
}
return
})
+ return prom
+ })
+
+ test('proj0.17 - coverage in class property getters/setters', async () => {
+ FileUtils.deleteFile([toUri('results.xml'), toUri('results.json')], { force: true })
+ FileUtils.copyFile(toUri('.vscode/ablunit-test-profile.proj0.17.json'), toUri('.vscode/ablunit-test-profile.json'))
+ await runTestAtLine('src/test_17.cls', 33, 1, true)
+ .then(() => {
+ assert.tests.count(1)
+ assert.tests.passed(1)
+ assert.tests.failed(0)
+ assert.tests.errored(0)
+ assert.tests.skipped(0)
+ assert.linesExecuted('src/test_17.cls', [6, 7, 8])
+ assert.linesExecuted('src/test_17.cls', [40, 41, 42, 43])
+ })
+ })
+
+ test('proj0.18 - not 100% coverage', async () => {
+ await runTestsInFile('src/threeTestProcedures.p', 1, true)
+ const res = await getResults()
+ assert.equal(res.length, 1, 'ABLResults[].length')
+ assert.equal(res[0].profileJson.length, 5, 'ABLResults[0].profileJson[].length')
+
+ const fc = res[0].fileCoverage.get(toUri('src/threeTestProcedures.p').fsPath)
+ const sc = res[0].statementCoverage.get(toUri('src/threeTestProcedures.p').fsPath) ?? []
+ const dc = res[0].declarationCoverage.get(toUri('src/threeTestProcedures.p').fsPath) ?? []
+ assert.ok(fc, 'fileCoverage')
+ assert.greater(sc.length, 10, 'statementCoverage[].length')
+ assert.equal(dc.length, 5, 'declarationCoverage[].length')
+
+ assert.ok(fc?.branchCoverage == undefined, 'branchCoverage')
+ assert.equal(fc?.declarationCoverage?.total, 5, 'fc.declarationCoverage.total')
+ assert.equal(fc?.statementCoverage?.total, 19, 'fc.statementCoverage.total')
+ assert.less(fc?.declarationCoverage?.covered ?? 0, fc?.declarationCoverage?.total ?? 0,
+ 'declarationCoverage not 100% (' + (fc?.declarationCoverage?.covered ?? 0) + ' >= ' + (fc?.declarationCoverage?.total ?? 0) + ')')
+ assert.less(fc?.statementCoverage?.covered ?? 0, fc?.statementCoverage?.total ?? 0,
+ 'statementCoverage not 100% (' + (fc?.statementCoverage?.covered ?? 0) + ' >= ' + (fc?.statementCoverage?.total ?? 0) + ')')
+ })
+
+ test('proj0.19 - program runs external source', async () => {
+ await runTestsInFile('src/test19.p', 1, true)
+ const res = await getResults()
+ assert.equal(res.length, 1, 'ABLResults[].length')
+ assert.equal(res[0].fileCoverage.size, 1, 'ABLResults[0].fileCoverage.size')
+ assert.equal(res[0].declarationCoverage.size, 1, 'ABLResults[0].declarationCoverage.size')
+
+ let cnt = 0
+ res[0].declarationCoverage.forEach((dc, path) => {
+ assert.equal(dc.length, 4, 'dc.length (path=' + path + ')')
+ cnt++
+ })
+ assert.equal(cnt, 1, 'declarationCoverage count')
+
+ await commands.executeCommand('testing.openCoverage')
+ await sleep2(100)
+
+ // const coverage: FileCoverage = await commands.executeCommand('testing.coverage.uri', toUri('src/test19.p'))
+ // log.info('coverage=' + JSON.stringify(coverage, null, 2))
+
+ for (const child of res[0].tests[0].children) {
+ const [testId, ] = child
+ const r = await commands.executeCommand('_loadDetailedCoverageForTest', toUri('src/test19.p'), testId).then((r) => {
+ log.info('success')
+ return r
+ }, (e: unknown) => {
+ log.error('e=' + e)
+ throw e
+ })
+ log.debug('r=' + JSON.stringify(r, null, 2))
+ }
})
})
diff --git a/test/suites/proj1.test.ts b/test/suites/proj1.test.ts
index e0f5dfd9..7c8eec13 100644
--- a/test/suites/proj1.test.ts
+++ b/test/suites/proj1.test.ts
@@ -30,11 +30,11 @@ suite('proj1 - Extension Test Suite', () => {
})
suiteTeardown('proj1 - suiteTeardown', () => {
- FileUtils.deleteFile(
+ FileUtils.deleteFile([
Uri.joinPath(workspaceUri, 'openedge-project.bk.json'),
Uri.joinPath(workspaceUri, '.vscode', 'ablunit-test-profile.bk.json'),
Uri.joinPath(workspaceUri, '.vscode', 'settings.bk.json'),
- )
+ ])
})
test('proj1.1 - output files exist 1 - compile error', () => {
@@ -152,31 +152,6 @@ suite('proj1 - Extension Test Suite', () => {
})
})
- test('proj1.8 - update charset to ISO8559-1, then read file with UTF-8 chars', async () => {
- FileUtils.copyFile(toUri('openedge-project.proj1.8.json'), toUri('openedge-project.json'), { force: true })
-
- await runTestAtLine('import_charset.p', 14)
- .then(() => {
- log.info('testing.runAtCursor complete')
- assert.tests.count(1)
- assert.tests.passed(0)
- assert.tests.failed(1)
- assert.tests.errored(0)
- })
- })
-
- test('proj1.9 - check startup parmaeters for -y -yx', async () => {
- FileUtils.copyFile(Uri.joinPath(workspaceUri, 'openedge-project.proj1.9.json'), Uri.joinPath(workspaceUri, 'openedge-project.json'), { force: true })
- await runTestAtLine('import_charset.p', 68)
- .then(() => {
- log.info('testing.runAtCursor complete')
- assert.tests.count(1)
- assert.tests.passed(0)
- assert.tests.failed(1)
- assert.tests.errored(0)
- })
- })
-
test('proj1.10 - xref options', async () => {
if (oeVersion() < '12.5') {
@@ -282,7 +257,7 @@ suite('proj1 - Extension Test Suite', () => {
return p
})
- test('proj1.15A - compile option without MIN-SIZE without xref', () => {
+ test('proj1.15 - compile option without MIN-SIZE without xref', () => {
const p = compileWithTaskAndRunWithCoverage('ant build')
.then(() => {
assert.linesExecuted('test_15.p', [9, 10, 13, 14])
@@ -311,14 +286,39 @@ suite('proj1 - Extension Test Suite', () => {
})
return p
})
-})
+ test('proj1.98 - check startup parameters for -y -yx', async () => {
+ FileUtils.copyFile(Uri.joinPath(workspaceUri, 'openedge-project.proj1.98.json'), Uri.joinPath(workspaceUri, 'openedge-project.json'), { force: true })
+ await runTestAtLine('import_charset.p', 68)
+ .then(() => {
+ log.info('testing.runAtCursor complete')
+ assert.tests.count(1)
+ assert.tests.passed(0)
+ assert.tests.failed(1)
+ assert.tests.errored(0)
+ })
+ })
+
+ test('proj1.99 - update charset to ISO8859-1, then read file with UTF-8 chars', async () => {
+ FileUtils.copyFile(toUri('openedge-project.proj1.99.json'), toUri('openedge-project.json'), { force: true })
+
+ await runTestAtLine('import_charset.p', 14)
+ .then(() => {
+ log.info('testing.runAtCursor complete')
+ assert.tests.count(1)
+ assert.tests.passed(1)
+ assert.tests.failed(0)
+ assert.tests.errored(0)
+ })
+ })
+
+})
async function compileWithTaskAndRunWithCoverage (taskName: string) {
- FileUtils.deleteFile(
+ FileUtils.deleteFile([
Uri.joinPath(workspaceUri, 'test_15.r'),
Uri.joinPath(workspaceUri, 'openedge-project.json'),
- )
+ ])
FileUtils.copyFile(Uri.joinPath(workspaceUri, '.vscode', 'ablunit-test-profile.proj1.15.json'), Uri.joinPath(workspaceUri, '.vscode', 'ablunit-test-profile.json'), { force: true })
const p2 = new Promise((resolve) => {
diff --git a/test/suites/proj2.test.ts b/test/suites/proj2.test.ts
index e45339c3..92bda2cf 100644
--- a/test/suites/proj2.test.ts
+++ b/test/suites/proj2.test.ts
@@ -28,7 +28,7 @@ suite('proj2 - Extension Test Suite', () => {
.then(() => commands.executeCommand('testing.runCurrentFile'))
.then(() => getResults())
.then((recentResults) => {
- log.info('recentResults = ' + recentResults + ' ' + recentResults.length)
+ log.info('recentResults.length=' + recentResults.length)
const tc = recentResults[0].ablResults?.resultsJson[0].testsuite?.[0].testcases?.[0]
const mdText = tc?.failures?.[0].callstack.items[1].markdownText
if (!mdText) {
@@ -60,53 +60,48 @@ suite('proj2 - Extension Test Suite', () => {
})
})
- test('proj2.4 - compile error - run all tests', () => {
+ test('proj2.4 - compile error - run all tests', async () => {
FileUtils.copyFile(
toUri('src/compileError.p.saveme'),
toUri('src/compileError.p'),
{ force: true }
)
- return runAllTests()
- .then(() => {
- throw new Error('test should have failed due to compile error')
- }, (e: unknown) => {
- log.info('e=' + e)
- assert.ok('test failed as expected')
- return true
- })
+ try {
+ await runAllTests()
+ } catch (e) {
+ log.info('e=' + e)
+ assert.ok('test failed as expected')
+ return true
+ }
+ throw new Error('test should have failed due to compile error')
})
- test('proj2.5 - compile error - run tests in file', () => {
+ test('proj2.5 - compile error - run tests in file', async () => {
FileUtils.copyFile(
toUri('src/compileError.p.saveme'),
toUri('src/compileError.p'),
{ force: true }
)
- return runTestsInFile('src/compileError.p')
- .then(() => {
- throw new Error('test should have failed due to compile error')
- }, (e: unknown) => {
- log.info('e=' + e)
- assert.ok('tests failed as expected')
- return true
- })
+ try {
+ await runTestsInFile('src/compileError.p')
+ } catch (e) {
+ log.info('e=' + e)
+ assert.ok('tests failed as expected')
+ return true
+ }
+ throw new Error('test should have failed due to compile error')
})
- test('proj2.6 - compile error - run with db conn', () => {
+ test('proj2.6 - compile error - run with db conn', async () => {
FileUtils.copyFile(
toUri('src/compileError.p.saveme'),
toUri('src/compileError.p'),
{ force: true }
)
- return selectProfile('profileWithDBConn')
- .then(() => runTestsInFile('src/compileError.p'))
- .then(() => {
- assert.ok('test passed as expected')
- assert.tests.count(1)
- return
- }, (e: unknown) => {
- throw new Error('test should have passed, but threw error e=' + e)
- })
+ await selectProfile('profileWithDBConn')
+ await runTestsInFile('src/compileError.p')
+ assert.ok('test passed as expected')
+ assert.tests.count(1)
})
})
diff --git a/test/suites/proj3.test.ts b/test/suites/proj3.test.ts
index f41aa2d7..65f3d0a0 100644
--- a/test/suites/proj3.test.ts
+++ b/test/suites/proj3.test.ts
@@ -1,4 +1,4 @@
-import { assert, getDefaultDLC, getWorkspaceUri, oeVersion, runAllTests, setRuntimes, suiteSetupCommon, Uri } from '../testCommon'
+import { assert, getDefaultDLC, getWorkspaceUri, oeVersion, runAllTests, runAllTestsWithCoverage, setRuntimes, suiteSetupCommon, Uri } from '../testCommon'
const workspaceUri = getWorkspaceUri()
@@ -14,7 +14,21 @@ suite('proj3 - Extension Test Suite', () => {
})
test('proj3.1 - target/ablunit.json file exists', () => {
- return runAllTests().then(() => {
+ const prom = runAllTests().then(() => {
+ const ablunitJson = Uri.joinPath(workspaceUri, 'target', 'ablunit.json')
+ const resultsXml = Uri.joinPath(workspaceUri, 'ablunit-output', 'results.xml')
+ const listingsDir = Uri.joinPath(workspaceUri, 'target', 'listings')
+
+ assert.fileExists(ablunitJson)
+ assert.fileExists(resultsXml)
+ assert.notDirExists(listingsDir)
+ return
+ }, (e: unknown) => { throw e })
+ return prom
+ })
+
+ test('proj3.2 - target/ablunit.json file exists w/ coverage', () => {
+ return runAllTestsWithCoverage().then(() => {
const ablunitJson = Uri.joinPath(workspaceUri, 'target', 'ablunit.json')
const resultsXml = Uri.joinPath(workspaceUri, 'ablunit-output', 'results.xml')
const listingsDir = Uri.joinPath(workspaceUri, 'target', 'listings')
diff --git a/test/suites/proj4.test.ts b/test/suites/proj4.test.ts
index 1da51881..6dc79e6e 100644
--- a/test/suites/proj4.test.ts
+++ b/test/suites/proj4.test.ts
@@ -1,11 +1,25 @@
-import { assert, getDefaultDLC, getSessionTempDir, getWorkspaceUri, oeVersion, runAllTests, setRuntimes, suiteSetupCommon, updateTestProfile, Uri } from '../testCommon'
+import { assert, getDefaultDLC, getWorkspaceUri, oeVersion, runAllTests, runAllTestsWithCoverage, setRuntimes, suiteSetupCommon, updateTestProfile, Uri } from '../testCommon'
import * as FileUtils from '../../src/FileUtils'
const sessionTempDir = getSessionTempDir()
+
+function getSessionTempDir () {
+ if (process.platform === 'win32') {
+ return Uri.file('c:/temp/ablunit')
+ }
+ if (process.platform === 'linux') {
+ return Uri.file('/tmp/ablunit')
+ }
+ throw new Error('Unsupported platform: ' + process.platform)
+}
+
suite('proj4 - Extension Test Suite', () => {
suiteSetup('proj4 - before', async () => {
+ if (!FileUtils.doesFileExist('.vscode/settings.json') && FileUtils.doesFileExist('.vscode/settings.json.bk')) {
+ FileUtils.copyFile('.vscode/settings.json.bk', '.vscode/settings.json')
+ }
await suiteSetupCommon()
if (process.platform === 'linux') {
await updateTestProfile('tempDir', '/tmp/ablunit')
@@ -22,49 +36,107 @@ suite('proj4 - Extension Test Suite', () => {
await updateTestProfile('profiler.listings', 'c:\\temp\\ablunit-local\\listings')
})
- test('proj4.1 - Absolute Paths', async () => {
+ test('proj4.1 - Absolute Paths', () => {
const listingsDir = Uri.joinPath(sessionTempDir, 'listings')
const resultsXml = Uri.joinPath(sessionTempDir, 'tempDir', 'results.xml')
- await updateTestProfile('profiler.listings', listingsDir.fsPath)
+ FileUtils.deleteDir(listingsDir)
+ FileUtils.deleteFile(resultsXml)
+
+ const prom = updateTestProfile('profiler.listings', listingsDir.fsPath)
.then(() => { return updateTestProfile('tempDir', Uri.joinPath(sessionTempDir, 'tempDir').fsPath) })
.then(() => { return runAllTests() })
.then(() => {
assert.fileExists(resultsXml)
+ assert.notDirExists(listingsDir)
+
+ FileUtils.deleteDir(listingsDir)
+ FileUtils.deleteFile(resultsXml)
+ return true
+ }, (e: unknown) => { throw e })
+ return prom
+ })
+
+ test('proj4.2 - Absolute Paths w/ coverage', () => {
+ const listingsDir = Uri.joinPath(sessionTempDir, 'listings')
+ const resultsXml = Uri.joinPath(sessionTempDir, 'tempDir', 'results.xml')
+ FileUtils.deleteDir(listingsDir)
+ FileUtils.deleteFile(resultsXml)
+
+ const prom = updateTestProfile('profiler.listings', listingsDir.fsPath)
+ .then(() => { return updateTestProfile('tempDir', Uri.joinPath(sessionTempDir, 'tempDir').fsPath) })
+ .then(() => { return runAllTestsWithCoverage() })
+ .then(() => {
assert.dirExists(listingsDir)
- return
+ assert.fileExists(resultsXml)
+
+ FileUtils.deleteDir(listingsDir)
+ FileUtils.deleteFile(resultsXml)
+ return true
}, (e: unknown) => { throw e })
+ return prom
})
- test('proj4.2 - tempDir=.builder/ablunit', async () => {
+ test('proj4.3 - tempDir=.builder/ablunit', async () => {
await updateTestProfile('tempDir', '.builder/ablunit')
const workspaceUri = getWorkspaceUri()
- await runAllTests()
const ablunitJson = Uri.joinPath(workspaceUri, '.builder', 'ablunit', 'ablunit.json')
+ FileUtils.deleteFile(ablunitJson)
+
+ await runAllTests()
assert.fileExists(ablunitJson)
+
+ FileUtils.deleteFile(ablunitJson)
})
- test('proj4.3 - tempDir=.builder/.ablunit', async () => {
+ test('proj4.4 - tempDir=.builder/.ablunit', async () => {
await updateTestProfile('tempDir', '.builder/.ablunit')
await updateTestProfile('profiler.listings', '.builder/.ablunit/.listings')
const workspaceUri = getWorkspaceUri()
+ const ablunitJson = Uri.joinPath(workspaceUri, '.builder', '.ablunit', 'ablunit.json')
+ const listingsDir = Uri.joinPath(workspaceUri, '.builder', '.ablunit', '.listings')
+ FileUtils.deleteFile(ablunitJson)
+ FileUtils.deleteDir(listingsDir)
+
await runAllTests()
+ assert.fileExists(ablunitJson)
+ assert.notDirExists(listingsDir)
+
+ FileUtils.deleteFile(ablunitJson)
+ FileUtils.deleteDir(listingsDir)
+ })
+
+ test('proj4.5 - tempDir=.builder/.ablunit w/ coverage', async () => {
+ await updateTestProfile('tempDir', '.builder/.ablunit')
+ await updateTestProfile('profiler.listings', '.builder/.ablunit/.listings')
+ const workspaceUri = getWorkspaceUri()
const ablunitJson = Uri.joinPath(workspaceUri, '.builder', '.ablunit', 'ablunit.json')
const listingsDir = Uri.joinPath(workspaceUri, '.builder', '.ablunit', '.listings')
+ FileUtils.deleteFile(ablunitJson)
+ FileUtils.deleteDir(listingsDir)
+
+ await runAllTestsWithCoverage()
assert.fileExists(ablunitJson)
assert.dirExists(listingsDir)
+
+ FileUtils.deleteFile(ablunitJson)
+ FileUtils.deleteDir(listingsDir)
})
- test('proj4.4 - tempDir=target', async () => {
+ test('proj4.6 - tempDir=target', async () => {
const workspaceUri = getWorkspaceUri()
const ablunitJson = Uri.joinPath(workspaceUri, 'target', 'ablunit.json')
const progressIni = Uri.joinPath(workspaceUri, 'target', 'progress.ini')
- FileUtils.deleteFile(progressIni)
+ FileUtils.deleteFile([ablunitJson, progressIni])
+
await updateTestProfile('tempDir', 'target')
await runAllTests()
assert.fileExists(ablunitJson)
if (process.platform === 'win32') {
assert.fileExists(progressIni)
}
+
+ FileUtils.deleteFile(ablunitJson)
+ FileUtils.deleteFile(progressIni)
})
})
diff --git a/test/suites/proj7B.test.ts b/test/suites/proj7B.test.ts
index a3e43178..e6275cb2 100644
--- a/test/suites/proj7B.test.ts
+++ b/test/suites/proj7B.test.ts
@@ -1,5 +1,5 @@
import { CancellationError, LogLevel, commands } from 'vscode'
-import { assert, RunStatus, beforeCommon, beforeProj7, cancelTestRun, getCurrentRunData, getTestControllerItemCount, isoDate, log, refreshTests, runAllTests, sleep, waitForTestRunStatus, sleep2 } from '../testCommon'
+import { assert, RunStatus, beforeCommon, beforeProj7, cancelTestRun, getCurrentRunData, getTestControllerItemCount, isoDate, log, refreshTests, runAllTests, waitForTestRunStatus, sleep2 } from '../testCommon'
import { Duration } from '../../src/ABLUnitCommon'
suite('proj7B - Extension Test Suite', () => {
@@ -11,7 +11,7 @@ suite('proj7B - Extension Test Suite', () => {
setup('proj7B - beforeEach', beforeCommon)
- test('proj7B.1 - cancel test refresh', async () => {
+ test.skip('proj7B.1 - cancel test refresh', async () => {
const minCancelTime = 1
const maxCancelTime = 350
const maxRefreshTime = 700
@@ -31,6 +31,7 @@ suite('proj7B - Extension Test Suite', () => {
}
}
+ await sleep2(2)
log.info('cancelling test refresh')
const startCancelTime = new Duration()
await commands.executeCommand('testing.cancelTestRefresh').then(() => {
@@ -47,7 +48,17 @@ suite('proj7B - Extension Test Suite', () => {
const ablfileCount = await getTestControllerItemCount('ABLTestFile')
log.info('controller file count after refresh = ' + ablfileCount)
- assert.assert(ablfileCount > 1 && ablfileCount < 1000, 'ablfileCount should be > 1 and < 500, but is ' + ablfileCount)
+ let fileCount = ablfileCount
+ if (fileCount <= 2) {
+ await sleep2(10)
+ fileCount = await getTestControllerItemCount('ABLTestFile')
+ log.info('controller file count after refresh(2) = ' + fileCount)
+ }
+ if (fileCount <= 2) {
+ await sleep2(10)
+ fileCount = await getTestControllerItemCount('ABLTestFile')
+ log.info('controller file count after refresh(3) = ' + fileCount)
+ }
+ assert.assert(fileCount > 1 && fileCount < 2000, 'ablfileCount should be > 1 and < 2000, but is ' + fileCount)
const prom = refresh.then(() => {
assert.fail('testing.refreshTests completed without throwing CancellationError')
@@ -68,7 +79,7 @@ suite('proj7B - Extension Test Suite', () => {
// const runTestTime = new Duration()
runAllTests().catch((e: unknown) => { log.info('runAllTests got error: ' + e) })
- await sleep(250)
+ await sleep2(250)
.then(() => { return waitForTestRunStatus(RunStatus.Constructed) })
const elapsedCancelTime = await cancelTestRun(false)
diff --git a/test/suites/proj9.test.ts b/test/suites/proj9.test.ts
index 7beb86c2..aa6c7dd3 100644
--- a/test/suites/proj9.test.ts
+++ b/test/suites/proj9.test.ts
@@ -1,5 +1,5 @@
import { assert, deleteTestFiles, getTestCount, getWorkspaceUri, log, runAllTests, selectProfile, suiteSetupCommon, updateTestProfile, Uri, workspace } from '../testCommon'
-import * as FileUtils from '../../src/FileUtils'
+import * as FileUtils from 'FileUtils'
const testProfileJson = Uri.joinPath(getWorkspaceUri(), '.vscode/ablunit-test-profile.json')
const testProfileBackup = Uri.joinPath(getWorkspaceUri(), '.vscode/ablunit-test-profile.json.backup')
@@ -8,8 +8,7 @@ suite('proj9 - Extension Test Suite', () => {
suiteSetup('proj9 - before', async () => {
await suiteSetupCommon()
- .then(() => { return workspace.fs.copy(testProfileJson, testProfileBackup, { overwrite: true }) })
- .then(() => { return }, (e: unknown) => { throw e })
+ FileUtils.copyFile(testProfileJson, testProfileBackup)
})
setup('proj9 - beforeEach', () => {
@@ -19,27 +18,13 @@ suite('proj9 - Extension Test Suite', () => {
return
})
- teardown('proj9 - afterEach', async () => {
+ teardown('proj9 - afterEach', () => {
FileUtils.deleteFile(testProfileJson)
- await workspace.fs.copy(testProfileBackup, testProfileJson, { overwrite: true })
- // await workspace.fs.copy(testProfileBackup, testProfileJson, { overwrite: true }).then(() => {
- // log.info('teardown return')
- // return
- // }, (e: unknown) => {
- // log.error('teardown error: e=' + e)
- // throw e
- // })
+ FileUtils.copyFile(testProfileBackup, testProfileJson)
})
suiteTeardown('proj9 - after', () => {
- return workspace.fs.delete(testProfileBackup).then(() => { return }, (e: unknown) => { return })
- // await workspace.fs.delete(testProfileBackup).then(() => {
- // log.info('suiteTeardown return')
- // return
- // }, (e: unknown) => {
- // log.error('suiteTeardown error: e=' + e)
- // throw e
- // })
+ FileUtils.deleteFile(testProfileBackup)
})
test('proj9.1 - ${workspaceFolder}/ablunit.json file exists', async () => {
diff --git a/test/suites/workspace1.test.ts b/test/suites/workspace1.test.ts
index 4366807e..790ca48d 100644
--- a/test/suites/workspace1.test.ts
+++ b/test/suites/workspace1.test.ts
@@ -1,36 +1,26 @@
import { Uri, workspace } from 'vscode'
import { assert, getWorkspaceUri, log, runAllTests, suiteSetupCommon, updateTestProfile } from '../testCommon'
+import * as FileUtils from 'FileUtils'
suite('workspace1 - Extension Test Suite', () => {
suiteSetup('workspace1 - before', async () => {
await suiteSetupCommon()
- const files = [
+ FileUtils.deleteFile([
Uri.joinPath(getWorkspaceUri(0), '.vscode', 'ablunit-test-profile.json'),
Uri.joinPath(getWorkspaceUri(0), 'listings'),
Uri.joinPath(getWorkspaceUri(1), 'listings'),
Uri.joinPath(getWorkspaceUri(0), 'workspaceAblunit'),
Uri.joinPath(getWorkspaceUri(1), 'workspaceAblunit'),
- ]
- for (const f of files) {
- log.info('deleting ' + f.fsPath)
- await workspace.fs.delete(f, { recursive: true })
- .then(() => {
- log.info('deleted ' + f.fsPath)
- return
- }, () => {
- log.info('cannot delete ' + f.fsPath + ', does not exist')
- })
- }
+ ])
- await workspace.fs.copy(Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json'), Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json.bk'), { overwrite: true })
+ FileUtils.copyFile(Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json'), Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json.bk'))
})
- teardown('workspace1 - afterEach', async () => {
- log.info('after')
- await workspace.fs.delete(Uri.joinPath(getWorkspaceUri(0), '.vscode', 'ablunit-test-profile.json')).then(() => { return }, (e: unknown) => { return })
- await workspace.fs.copy(Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json.bk'), Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json'), { overwrite: true })
+ teardown('workspace1 - afterEach', () => {
+ FileUtils.deleteFile(Uri.joinPath(getWorkspaceUri(0), '.vscode', 'ablunit-test-profile.json'))
+ FileUtils.copyFile(Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json.bk'), Uri.joinPath(getWorkspaceUri(1), '.vscode', 'ablunit-test-profile.json'))
})
test('workspace1.1 - /ablunit.json file exists', async () => {
diff --git a/test/testCommon.ts b/test/testCommon.ts
index 1ae2b444..d9f721d9 100644
--- a/test/testCommon.ts
+++ b/test/testCommon.ts
@@ -9,7 +9,6 @@ import {
WorkspaceFolder, commands, extensions, window,
workspace,
FileCoverageDetail,
- Position,
TestItem
} from 'vscode'
import { ABLResults } from '../src/ABLResults'
@@ -449,16 +448,6 @@ export function deleteTestFiles () {
FileUtils.deleteFile(Uri.joinPath(workspaceUri, 'results.xml'))
}
-export function getSessionTempDir () {
- if (process.platform === 'win32') {
- return Uri.file('c:/temp/ablunit')
- }
- if(process.platform === 'linux') {
- return Uri.file('/tmp/ablunit')
- }
- throw new Error('Unsupported platform: ' + process.platform)
-}
-
export async function getTestCount (resultsJson: Uri, status = 'tests') {
const count = await workspace.fs.readFile(resultsJson).then((content) => {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
@@ -518,7 +507,7 @@ export async function runAllTests (doRefresh = true, waitForResults = true, with
log.info(tag + 'running all tests')
if (doRefresh) {
- log.info('refresh before run - start')
+ log.info(tag + ' refresh before run - start')
await refreshTests()
// await refreshTests()
// .then(() => {
@@ -527,30 +516,30 @@ export async function runAllTests (doRefresh = true, waitForResults = true, with
// }, (e: unknown) => { throw e })
}
- log.info('testing.runAll starting (waitForResults=' + waitForResults + ')')
+ log.info(testCommand + ' starting (waitForResults=' + waitForResults + ')')
const r = await commands.executeCommand(testCommand)
.then((r) => {
log.info(tag + 'command ' + testCommand +' complete! (r=' + r + ')')
return sleep(250)
}, (e: unknown) => {
- log.error('testing.runAll failed: ' + e)
+ log.error(tag + testCommand + ' failed: ' + e)
throw e
})
.then(() => {
- log.info(tag + 'testing.runAll completed - start getResults()')
+ log.info(tag + testCommand + ' completed - start getResults()')
if (!waitForResults) { return [] }
return getResults(1, tag)
})
.then((r) => {
if (r.length >= 0) {
const fUri = r[0]?.cfg.ablunitConfig.optionsUri.filenameUri
- log.info(tag + 'testing.runAll command complete (filename=' + fUri.fsPath + ', r=' + r + ')')
+ log.info(tag + testCommand + ' command complete (filename=' + fUri.fsPath + ', r.length=' + r.length + ')')
return FileUtils.doesFileExist(fUri)
}
return false
}, (e: unknown) => {
runAllTestsDuration?.stop()
- throw new Error('testing.runAll failed: ' + e)
+ throw new Error(testCommand + ' failed: ' + e)
})
runAllTestsDuration.stop()
log.info(tag + 'runAllTests complete (r=' + r + ')')
@@ -564,26 +553,29 @@ export function runAllTestsWithCoverage () {
export function runTestsInFile (filename: string, len = 1, coverage = false) {
const testpath = toUri(filename)
log.info('running tests in file ' + testpath.fsPath)
+ let command = 'testing.runCurrentFile'
+ if (coverage) {
+ command = 'testing.coverageCurrentFile'
+ }
+
return commands.executeCommand('vscode.open', testpath)
.then(() => {
runTestsDuration = new Duration('runTestsInFile')
- if (coverage) {
- return commands.executeCommand('testing.coverageCurrentFile')
- }
- return commands.executeCommand('testing.runCurrentFile')
- }, (e: unknown) => {
- throw e
+ return commands.executeCommand(command)
})
.then((r: unknown) => {
+ log.debug('executeCommand(' + command + ').then completed successfully (r=' + JSON.stringify(r, null, 2) + ')')
runTestsDuration?.stop()
- return getResults(len)
+ return refreshData(len)
}, (e: unknown) => {
+ log.debug('executeCommand(' + command + ').catch failed: ' + e)
runTestsDuration?.stop()
throw e
})
}
-export function runTestAtLine (filename: string, line: number, len = 1) {
+export function runTestAtLine (filename: string, line: number, len = 1, withCoverage = false) {
+ const command = withCoverage ? 'testing.coverageAtCursor' : 'testing.runAtCursor'
const testpath = Uri.joinPath(getWorkspaceUri(), filename)
log.info('running test at line ' + line + ' in ' + testpath.fsPath)
return commands.executeCommand('vscode.open', testpath)
@@ -593,15 +585,15 @@ export function runTestAtLine (filename: string, line: number, len = 1) {
} else {
throw new Error('vscode.window.activeTextEditor is undefined')
}
- runTestsDuration = new Duration('runTestsAtLine')
- return commands.executeCommand('testing.runAtCursor')
+ runTestsDuration = new Duration(command)
+ return commands.executeCommand(command)
})
.then(() => {
runTestsDuration?.stop()
- return getResults(len)
+ return refreshData(len)
})
.then(() => {
- log.info('testing.runAtCursor complete')
+ log.info(command + ' complete')
return
}, (e: unknown) => { throw e })
}
@@ -657,7 +649,6 @@ export async function waitForTestRunStatus (waitForStatus: RunStatus) {
await sleep2(500, 'waitForTestRunStatus count=' + count + '; currentStatus=\'' + currentStatus.toString() + '\' + , waitForStatus=\'' + waitForStatus.toString() + '\'')
currentStatus = await getCurrentRunData()
.then((runData) => {
- log.info('100 runData.length=' + runData.length)
if (runData.length > 0) {
return runData[0].status
}
@@ -682,6 +673,7 @@ export async function waitForTestRunStatus (waitForStatus: RunStatus) {
}
export async function cancelTestRun (resolveCurrentRunData = true) {
+ log.info('cancelling test run')
cancelTestRunDuration = new Duration()
if (resolveCurrentRunData) {
const status = getCurrentRunData().then((resArr) => {
@@ -813,6 +805,9 @@ export function refreshData (resultsLen = 0) {
// log.info('refreshData command complete (resp=' + JSON.stringify(resp) + ')')
const refs = resp as IExtensionTestReferences
log.info('getExtensionTestReferences command complete (resp.length=' + refs.recentResults.length + ')')
+ if (refs.recentError) {
+ throw refs.recentError
+ }
// log.info('refs=' + JSON.stringify(refs))
if (refs.recentResults.length > 0) {
@@ -855,12 +850,10 @@ export function getTestItem (uri: Uri) {
}
return commands.executeCommand('_ablunit.getTestItem', uri)
.then((i: unknown) => {
- log.info('200')
if (!i) {
throw new Error('TestItem not found for ' + uri.fsPath)
}
const item = i as TestItem
- log.info('202 item.id=' + item.id)
return item
}, (e: unknown) => { throw e })
}
@@ -1000,11 +993,12 @@ export async function getResults (len = 1, tag?: string): Promise
class AssertTestResults {
assertResultsCountByStatus (expectedCount: number, status: 'passed' | 'failed' | 'errored' | 'skipped' | 'all') {
- const res = recentResults?.[0].ablResults?.resultsJson[0]
- if (!res) {
+ const resJson = recentResults?.[recentResults.length - 1].ablResults?.resultsJson
+ if (!resJson || resJson.length === 0) {
assertParent.fail('No results found. Expected ' + expectedCount + ' ' + status + ' tests')
return
}
+ const res = resJson[resJson.length - 1]
switch (status) {
// case 'passed': actualCount = res.passed; break
@@ -1063,10 +1057,11 @@ class AssertTestResults {
assert.fail('expected TimeoutError, but no error was thrown')
}
if (e instanceof Error) {
- assert.equal(e.name, 'TimeoutError', 'expected TimeoutError, but got ' + e.name + '\n\n' + JSON.stringify(e, null, 2))
- return
+ assert.equal(e.name, 'TimeoutError', 'expected TimeoutError, but got e=' + e + '\n\n' + JSON.stringify(e, null, 2))
+ } else {
+ assert.fail('expected TimeoutError, but got e=' + e + '\n\n' + JSON.stringify(e, null, 2))
}
- assert.fail('expected TimeoutError, but non-Error type detected: ' + e + '\n\n' + JSON.stringify(e, null, 2))
+ return true
}
}
@@ -1075,12 +1070,16 @@ function getLineExecutions (coverage: FileCoverageDetail[] | never[], lineNum: n
throw new Error('coverage is undefined')
}
- const details = coverage.filter((d: FileCoverageDetail) => {
- const r = d.location as Position
- return r.line == lineNum
+ const details = coverage.filter((d: FileCoverageDetail) => {
+ if (d.location instanceof vscode.Range) {
+ return d.location.start.line == lineNum
+ } else if (d.location instanceof vscode.Position) {
+ return d.location.line == lineNum
+ }
+ return false
+ })
if (details.length === 0) {
- throw new Error('Could not find line ' + lineNum + ' in coverage')
+ log.error('could not find line ' + lineNum + ' in coverage (details.length=' + details.length + ')')
+ throw new Error('Could not find line ' + lineNum + ' in coverage (details.length=' + details.length + ')')
}
let executed = 0
@@ -1088,6 +1087,7 @@ function getLineExecutions (coverage: FileCoverageDetail[] | never[], lineNum: n
if (typeof l.executed === 'number') {
executed += l.executed
} else {
+ log.error('executed is not a number! details=' + JSON.stringify(details))
throw new Error('executed is not a number! details=' + JSON.stringify(details))
}
}
@@ -1128,6 +1128,9 @@ export const assert = {
greaterOrEqual (testValue: number, greaterThan: number, message?: string) {
assertParent.ok(testValue >= greaterThan, message)
},
+ less (testValue: number, lessThan: number, message?: string) {
+ assertParent.ok(testValue < lessThan, message)
+ },
lessOrEqual (testValue: number, lessThan: number, message?: string) {
assertParent.ok(testValue <= lessThan, message)
},
@@ -1159,38 +1162,26 @@ export const assert = {
fileExists: (...files: (string | Uri)[]) => {
if (files.length === 0) { throw new Error('no file(s) specified') }
- for (let file of files) {
- if (!(file instanceof Uri)) {
- file = toUri(file)
- }
- assertParent.ok(FileUtils.doesFileExist(file), 'file does not exist: ' + fileToString(file))
+ for (const file of files) {
+ assertParent.ok(FileUtils.doesFileExist(toUri(file)), 'file does not exist: ' + fileToString(file))
}
},
notFileExists: (...files: string[] | Uri[]) => {
if (files.length === 0) { throw new Error('no file(s) specified') }
- for (let file of files) {
- if (!(file instanceof Uri)) {
- file = toUri(file)
- }
- assertParent.ok(!FileUtils.doesFileExist(file), 'file exists: ' + fileToString(file))
+ for (const file of files) {
+ assertParent.ok(!FileUtils.doesFileExist(toUri(file)), 'file exists: ' + fileToString(file))
}
},
dirExists: (...dirs: (string | Uri)[]) => {
if (dirs.length === 0) { throw new Error('no dir(s) specified') }
- for (let dir of dirs) {
- if (!(dir instanceof Uri)) {
- dir = toUri(dir)
- }
- assertParent.ok(FileUtils.doesDirExist(dir), 'dir does not exist: ' + fileToString(dir))
+ for (const dir of dirs) {
+ assertParent.ok(FileUtils.doesDirExist(toUri(dir)), 'dir does not exist: ' + fileToString(dir))
}
},
notDirExists: (...dirs: string[] | Uri[]) => {
if (dirs.length === 0) { throw new Error('no dir(s) specified') }
- for (let dir of dirs) {
- if (!(dir instanceof Uri)) {
- dir = toUri(dir)
- }
- assertParent.ok(!FileUtils.doesDirExist(dir), 'dir exists: ' + fileToString(dir))
+ for (const dir of dirs) {
+ assertParent.ok(!FileUtils.doesDirExist(toUri(dir)), 'dir exists: ' + fileToString(dir))
}
},
@@ -1232,11 +1223,11 @@ export const assert = {
return
}
- const actual = recentResults[recentResults.length - 1].coverage.size
+ const actual = recentResults[recentResults.length - 1].statementCoverage.size
let msg = 'covered files (' + actual + ') != ' + expected
if (actual != expected) {
msg += '\nfound:'
- for (const c of recentResults[recentResults.length - 1].coverage) {
+ for (const c of recentResults[recentResults.length - 1].statementCoverage) {
msg += '\n * ' + c[0]
// log.info('covered file: ' + c[0])
}
@@ -1260,7 +1251,7 @@ export const assert = {
return
}
- const coverage = recentResults[recentResults.length - 1].coverage.get(file.fsPath)
+ const coverage = recentResults[recentResults.length - 1].statementCoverage.get(file.fsPath)
if (!coverage) {
assert.fail('no coverage found for ' + file.fsPath)
return
@@ -1268,12 +1259,10 @@ export const assert = {
for (const line of lines) {
log.info('checking line ' + line + ' in ' + file.fsPath)
const executions = getLineExecutions(coverage, line)
- if (!executed) {
- log.info(' - not executed')
- assert.equal(executions, 0, 'line ' + line + ' in ' + file.fsPath + ' was executed (lineCoverage.executed=' + executions + ')')
- } else {
- log.info(' - executed')
+ if (executed) {
assert.greater(executions, 0, 'line ' + line + ' in ' + file.fsPath + ' was not executed (lineCoverage.executed=' + executions + ')')
+ } else {
+ assert.equal(executions, 0, 'line ' + line + ' in ' + file.fsPath + ' was executed (lineCoverage.executed=' + executions + ')')
}
}
},
diff --git a/test_projects/externalSource.p b/test_projects/externalSource.p
new file mode 100644
index 00000000..cbc9313f
--- /dev/null
+++ b/test_projects/externalSource.p
@@ -0,0 +1,26 @@
+message 100 "EXTERNAL SOURCE START".
+
+if true then
+ run proc1.
+else
+ run proc2.
+
+message 103 "EXTERNAL SOURCE END".
+
+procedure proc1:
+ define variable cnt as integer no-undo.
+ do cnt=1 to 5:
+ message 'cnt=' + string(cnt).
+ if cnt mod 5 = 0 then
+ message 200.
+ if cnt mod 7 = 0 then
+ message 201.
+ end.
+end procedure.
+
+procedure proc2:
+ if true then
+ message 300.
+ else
+ message 301.
+end procedure.
diff --git a/test_projects/proj0/.vscode/ablunit-test-profile.proj0.17.json b/test_projects/proj0/.vscode/ablunit-test-profile.proj0.17.json
new file mode 100644
index 00000000..77fd21c2
--- /dev/null
+++ b/test_projects/proj0/.vscode/ablunit-test-profile.proj0.17.json
@@ -0,0 +1,14 @@
+{
+ "configurations": [
+ {
+ "options": {
+ "output": {
+ "writeJson": true
+ }
+ },
+ "profiler": {
+ "writeJson": true
+ }
+ }
+ ]
+}
diff --git a/test_projects/proj0/src/multiTestProc.p b/test_projects/proj0/src/multiTestProc.p
new file mode 100644
index 00000000..6cfc9888
--- /dev/null
+++ b/test_projects/proj0/src/multiTestProc.p
@@ -0,0 +1,15 @@
+
+
+if true then
+ message 900.
+else
+ message 901.
+
+define variable cnt as integer.
+do cnt = 1 to 5:
+ message 902.
+ if cnt mod 3 = 0 then
+ message 903.
+ else if cnt mod 7 = 0 then
+ message 904. // not executed
+end.
diff --git a/test_projects/proj0/src/simpleError.p b/test_projects/proj0/src/simpleError.p
new file mode 100644
index 00000000..8234876a
--- /dev/null
+++ b/test_projects/proj0/src/simpleError.p
@@ -0,0 +1,10 @@
+block-level on error undo, throw.
+
+@Test.
+procedure simple-pass-proc :
+end procedure.
+
+@Test.
+procedure simple-error-proc :
+ OpenEdge.Core.Assert:Equals(1,2).
+end procedure.
diff --git a/test_projects/proj0/src/test19.p b/test_projects/proj0/src/test19.p
new file mode 100644
index 00000000..6738fd47
--- /dev/null
+++ b/test_projects/proj0/src/test19.p
@@ -0,0 +1,41 @@
+
+block-level on error undo, throw.
+
+if true then
+ message "first".
+else
+do:
+ message "second".
+ message "third".
+end.
+
+file-info:file-name = '.'.
+message 'file-info:full-pathname=' + file-info:full-pathname.
+
+define variable runpath as character no-undo.
+runpath = replace(file-info:full-pathname, '~\', '/').
+runpath = substring(runpath, 1, r-index(runpath, '/')) + 'externalSource.p'.
+message runpath.
+
+run value(runpath).
+
+@Test.
+procedure testProcedureName :
+ OpenEdge.Core.Assert:Equals(1,1).
+ run procedureNotTest.
+end procedure.
+
+@Test.
+procedure testProcedureName2 :
+ OpenEdge.Core.Assert:Equals(1,1).
+end procedure.
+
+procedure notRunProc :
+ if true then
+ message "notRunProc".
+end procedure.
+
+procedure procedureNotTest:
+ if false then
+ message "procedureNotTest".
+end procedure.
diff --git a/test_projects/proj0/src/test_17.cls b/test_projects/proj0/src/test_17.cls
new file mode 100644
index 00000000..80829a70
--- /dev/null
+++ b/test_projects/proj0/src/test_17.cls
@@ -0,0 +1,46 @@
+block-level on error undo, throw.
+using OpenEdge.Core.Assert.
+
+class test_17 :
+
+ define property propWithGet as character no-undo get:
+ if true then
+ do:
+ message 100.
+ define variable cnt as integer no-undo.
+ do cnt = 1 to 10:
+ message 200.
+ end.
+ propWithGet = 'xyz'.
+ end.
+ else
+ do:
+ message 101.
+ propWithGet = 'abx'.
+ end.
+ end get. set.
+
+ constructor test_17 () :
+ end constructor.
+
+ define property propWithSet as integer no-undo get. set (invar as integer) :
+ if false then
+ do:
+ message 200.
+ propWithSet = 1.
+ end.
+ else
+ do:
+ message 201.
+ propWithSet = -2.
+ end.
+ end set.
+
+ @Test.
+ method public void testMethod () :
+ propWithSet = 1.
+ if propWithGet <> string(propWithSet) then
+ assert:equals(1,1).
+ end method.
+
+end class.
diff --git a/test_projects/proj0/src/test_18A.cls b/test_projects/proj0/src/test_18A.cls
new file mode 100644
index 00000000..3514c624
--- /dev/null
+++ b/test_projects/proj0/src/test_18A.cls
@@ -0,0 +1,14 @@
+block-level on error undo, throw.
+using OpenEdge.Core.Assert.
+
+class test_18A :
+
+ @Test.
+ method public void testMethod () :
+ define variable x as test_18_notATest no-undo.
+ x = new test_18_notATest().
+ message 100 x:propWithGet x:propWithSet.
+ assert:equals(1,1).
+ end method.
+
+end class.
diff --git a/test_projects/proj0/src/test_18B.cls b/test_projects/proj0/src/test_18B.cls
new file mode 100644
index 00000000..2fcb5af6
--- /dev/null
+++ b/test_projects/proj0/src/test_18B.cls
@@ -0,0 +1,17 @@
+block-level on error undo, throw.
+using OpenEdge.Core.Assert.
+
+class test_18B :
+
+ @Test.
+ method public void testMethod () :
+ define variable x as test_18_notATest no-undo.
+ x = new test_18_notATest().
+ if true then
+ assert:equals(2,2).
+ else
+ assert:equals(1,2).
+ message 'done'.
+ end method.
+
+end class.
diff --git a/test_projects/proj0/src/test_18_notATest.cls b/test_projects/proj0/src/test_18_notATest.cls
new file mode 100644
index 00000000..d6dd153b
--- /dev/null
+++ b/test_projects/proj0/src/test_18_notATest.cls
@@ -0,0 +1,52 @@
+block-level on error undo, throw.
+using OpenEdge.Core.Assert.
+
+class test_18_notATest :
+
+ define property propWithGet as character no-undo get:
+ if true then
+ do:
+ message 100.
+ propWithGet = 'xyz'.
+ end.
+ else
+ do:
+ message 101.
+ propWithGet = 'abx'.
+ end.
+ end get. set.
+
+ define property propWithSet as integer no-undo get. set (invar as integer) :
+ if false then
+ do:
+ message 200.
+ propWithSet = 1.
+ end.
+ else
+ do:
+ message 201.
+ propWithSet = -2.
+ end.
+ end set.
+
+ define property propOther as logical get. set.
+
+ define property propOther2 as integer get:
+ message 300.
+ return 1.
+ end get. set.
+
+ define property propWithGetSet as integer no-undo get:
+ message 100.
+ return 99.
+ end get. set (invar as integer) :
+ message 200.
+ propWithGetSet = invar.
+ end set.
+
+ method public void testMethod () :
+ if propWithGet <> string(propWithSet) then
+ assert:equals(1,1).
+ end method.
+
+end class.
diff --git a/test_projects/proj0/src/threeTestMethods.cls b/test_projects/proj0/src/threeTestMethods.cls
index 682e4a63..8b16a2e8 100644
--- a/test_projects/proj0/src/threeTestMethods.cls
+++ b/test_projects/proj0/src/threeTestMethods.cls
@@ -3,14 +3,30 @@ CLASS threeTestMethods:
@Test.
METHOD PUBLIC VOID CustomerFormTest ():
+ message 100.
+ run multiTestProc.p.
END METHOD.
@Test.
METHOD PUBLIC VOID CustomerViewerTest ():
+ if true then
+ message 200.
+ else
+ message 201.
+
+ define variable cnt as integer no-undo.
+ do cnt = 1 to 10:
+ message 'cnt=' + string(cnt).
+ end.
+
+ run multiTestProc.p.
END METHOD.
@Test (expected="Progress.Lang.OtherError").
- METHOD PUBLIC VOID CustomerGridTest ():
+ METHOD PUBLIC VOID CustomerGridTest () :
+
+ message 300.
END METHOD.
+ //comment
END CLASS.
diff --git a/test_projects/proj0/src/threeTestProcedures.p b/test_projects/proj0/src/threeTestProcedures.p
index 10d92ba2..689c0ca9 100644
--- a/test_projects/proj0/src/threeTestProcedures.p
+++ b/test_projects/proj0/src/threeTestProcedures.p
@@ -1,12 +1,46 @@
+
+message 100.
+
@Test.
procedure CustomerFormTest :
+ message 200.
+ run proc1.
end procedure.
+//comment
+
+procedure notATest1:
+ if true then
+ message 201.
+ else
+ message 202.
+end procedure.
+//comment
+
@Test.
+
procedure CustomerViewerTest :
+ if true then
+ message 300.
+ else
+ message 301.
+ run multiTestProc.p.
end procedure.
+//comment
+//comment
@Test (expected="Progress.Lang.OtherError").
procedure CustomerGridTest :
+ message 400.
+ run proc1.
+ message 401.
+end procedure.
+
+//test
+//comment
+
+
+procedure proc1:
+ message 500.
end procedure.
diff --git a/test_projects/proj1/.vscode/tasks.json b/test_projects/proj1/.vscode/tasks.json
index f37a9daf..15accc72 100644
--- a/test_projects/proj1/.vscode/tasks.json
+++ b/test_projects/proj1/.vscode/tasks.json
@@ -3,11 +3,14 @@
{
"label": "ant build",
"type": "shell",
- "command": "${env:DLC}/ant/bin/ant compile -Dxref=false",
+ "command": "${env:DLC}/ant/bin/ant",
"linux": {
- "command": "ant compile -Dxref=false"
+ "command": "ant",
},
- "args": [],
+ "args": [
+ "compile",
+ "-Dxref=false",
+ ],
"group": {
"kind": "build",
"isDefault": true
@@ -17,11 +20,15 @@
{
"label": "ant build min-size",
"type": "shell",
- "command": "${env:DLC}/ant/bin/ant compile -DminSize=true -Dxref=true",
+ "command": "${env:DLC}/ant/bin/ant",
"linux": {
- "command": "ant compile -DminSize=true -Dxref=true"
+ "command": "ant"
},
- "args": [],
+ "args": [
+ "compile",
+ "-DminSize=true",
+ "-Dxref=true",
+ ],
"group": {
"kind": "build",
"isDefault": true
diff --git a/test_projects/proj1/openedge-project.proj1.9.json b/test_projects/proj1/openedge-project.proj1.98.json
similarity index 100%
rename from test_projects/proj1/openedge-project.proj1.9.json
rename to test_projects/proj1/openedge-project.proj1.98.json
diff --git a/test_projects/proj1/openedge-project.proj1.8.json b/test_projects/proj1/openedge-project.proj1.99.json
similarity index 100%
rename from test_projects/proj1/openedge-project.proj1.8.json
rename to test_projects/proj1/openedge-project.proj1.99.json
diff --git a/tsconfig.json b/tsconfig.json
index 4db8ee52..7a841200 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -28,7 +28,7 @@
// ],
"allowJs": true,
- "checkJs": true,
+ // "checkJs": true,
// "allowSyntheticDefaultImports": true, // enabled by "esModuleInterop"
// "allowUnreachableCode": false, // editor warning when undefined
@@ -50,7 +50,7 @@
"strictFunctionTypes": true,
"strictNullChecks": true,
"strictPropertyInitialization": true,
- "useUnknownInCatchVariables": false,
+ "useUnknownInCatchVariables": true,
},
"include": [
"./**/*.ts",