Migrate path translation tests to acceptance tests #2122

Merged
merged 8 commits on Jan 17, 2025

Changes from 5 commits
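Each test migrated in this PR is a self-contained acceptance test directory. The layout below is inferred from the file paths in this diff, not from any documentation in the PR:

acceptance/bundle/paths/fallback/
├── databricks.yml          # bundle configuration under test
├── resources/*.yml         # job and pipeline definitions
├── script                  # commands the test harness runs
├── script.cleanup          # removes temporary files after the run
├── output.txt              # expected CLI output, including exit codes
├── output.job.json         # expected job tasks after path translation
└── output.pipeline.json    # expected pipeline libraries after path translation

The nominal variant follows the same layout.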
@@ -1,5 +1,5 @@
 bundle:
-  name: path_translation_nominal
+  name: fallback
 
 include:
   - "resources/*.yml"
67 changes: 67 additions & 0 deletions acceptance/bundle/paths/fallback/output.job.json
@@ -0,0 +1,67 @@
[
{
"job_cluster_key": "default",
"notebook_task": {
"notebook_path": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
{
"dbt_task": {
"commands": [
"dbt run",
"dbt run"
],
"project_directory": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
},
{
"job_cluster_key": "default",
"sql_task": {
"file": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},
"task_key": "sql_example"
},
{
"job_cluster_key": "default",
"libraries": [
{
"whl": "dist/wheel1.whl"
},
{
"whl": "dist/wheel2.whl"
}
],
"python_wheel_task": {
"package_name": "my_package"
},
"task_key": "python_wheel_example"
},
{
"job_cluster_key": "default",
"libraries": [
{
"jar": "target/jar1.jar"
},
{
"jar": "target/jar2.jar"
}
],
"spark_jar_task": {
"main_class_name": "com.example.Main"
},
"task_key": "spark_jar_example"
}
]
22 changes: 22 additions & 0 deletions acceptance/bundle/paths/fallback/output.pipeline.json
@@ -0,0 +1,22 @@
[
{
"file": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/file1.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/notebook1"
}
},
{
"file": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/file2.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/src/notebook2"
}
}
]
18 changes: 18 additions & 0 deletions acceptance/bundle/paths/fallback/output.txt
@@ -0,0 +1,18 @@

>>> $CLI bundle validate -t development -o json

Exit code: 0

>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: fallback
Target: error
Workspace:
User: tester@databricks.com
Path: /Workspace/Users/tester@databricks.com/.bundle/fallback/error

Found 1 error

Exit code: 1
@@ -4,33 +4,45 @@ resources:
       name: "placeholder"
       tasks:
         - task_key: notebook_example
+          job_cluster_key: default
           notebook_task:
             notebook_path: "this value is overridden"
 
         - task_key: spark_python_example
+          job_cluster_key: default
           spark_python_task:
             python_file: "this value is overridden"
 
         - task_key: dbt_example
+          job_cluster_key: default
           dbt_task:
             project_directory: "this value is overridden"
             commands:
               - "dbt run"
 
         - task_key: sql_example
+          job_cluster_key: default
           sql_task:
             file:
               path: "this value is overridden"
             warehouse_id: cafef00d
 
         - task_key: python_wheel_example
+          job_cluster_key: default
           python_wheel_task:
             package_name: my_package
           libraries:
             - whl: ../dist/wheel1.whl
 
         - task_key: spark_jar_example
+          job_cluster_key: default
           spark_jar_task:
             main_class_name: com.example.Main
           libraries:
             - jar: ../target/jar1.jar
+
+      # Include a job cluster for completeness
+      job_clusters:
+        - job_cluster_key: default
+          new_cluster:
+            spark_version: 15.4.x-scala2.12
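Comparing this source file with output.job.json above shows the two behaviors the golden files pin down: relative local paths such as ../dist/wheel1.whl and ../target/jar1.jar are rewritten relative to the bundle root (dist/wheel1.whl, target/jar1.jar), while notebook, file, and directory references become absolute workspace paths under /Workspace/Users/tester@databricks.com/.bundle/fallback/development/files/. The "this value is overridden" placeholders are replaced by per-target overrides not shown in this hunk; in the error target the notebook placeholder survives untranslated, which is what produces the validation error recorded in output.txt. For example, from the diff above:

# resources/*.yml (source)
libraries:
  - whl: ../dist/wheel1.whl

# output.job.json (after translation for the development target)
"libraries": [
  {
    "whl": "dist/wheel1.whl"
  },
  ...
]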
10 changes: 10 additions & 0 deletions acceptance/bundle/paths/fallback/script
@@ -0,0 +1,10 @@
errcode trace $CLI bundle validate -t development -o json > output.tmp.json

# Capture job tasks
jq '.resources.jobs.my_job.tasks' output.tmp.json > output.job.json

# Capture pipeline libraries
jq '.resources.pipelines.my_pipeline.libraries' output.tmp.json > output.pipeline.json

# Expect failure for the "error" target
errcode trace $CLI bundle validate -t error
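The script uses two helpers from the acceptance test harness; their definitions are not part of this PR. The sketch below is a plausible reconstruction based only on what output.txt records (the ">>> " command echo and the "Exit code: N" lines), not the harness's actual code; the real harness also appears to substitute the literal $CLI placeholder back into the echoed command:

# Hypothetical reconstruction -- not the harness's real implementation.
trace() {
  # Echo the command the way it appears in output.txt, then run it.
  echo ""
  echo ">>> $*"
  "$@"
}

errcode() {
  # Run the command, record its exit code in the output, and keep the
  # script going even when the command fails.
  "$@"
  echo ""
  echo "Exit code: $?"
}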
1 change: 1 addition & 0 deletions acceptance/bundle/paths/fallback/script.cleanup
@@ -0,0 +1 @@
rm -f output.tmp.json
@@ -1,5 +1,5 @@
 bundle:
-  name: path_translation_fallback
+  name: nominal
 
 include:
   - "resources/*.yml"
89 changes: 89 additions & 0 deletions acceptance/bundle/paths/nominal/output.job.json
@@ -0,0 +1,89 @@
[
{
"job_cluster_key": "default",
"notebook_task": {
"notebook_path": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
{
"dbt_task": {
"commands": [
"dbt run",
"dbt run"
],
"project_directory": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
},
{
"job_cluster_key": "default",
"sql_task": {
"file": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},
"task_key": "sql_example"
},
{
"job_cluster_key": "default",
"libraries": [
{
"whl": "dist/wheel1.whl"
},
{
"whl": "dist/wheel2.whl"
}
],
"python_wheel_task": {
"package_name": "my_package"
},
"task_key": "python_wheel_example"
},
{
"job_cluster_key": "default",
"libraries": [
{
"jar": "target/jar1.jar"
},
{
"jar": "target/jar2.jar"
}
],
"spark_jar_task": {
"main_class_name": "com.example.Main"
},
"task_key": "spark_jar_example"
},
{
"for_each_task": {
"task": {
"notebook_task": {
"notebook_path": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/notebook"
}
}
},
"job_cluster_key": "default",
"task_key": "for_each_notebook_example"
},
{
"for_each_task": {
"task": {
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/file.py"
}
}
},
"task_key": "for_each_spark_python_example"
}
]
22 changes: 22 additions & 0 deletions acceptance/bundle/paths/nominal/output.pipeline.json
@@ -0,0 +1,22 @@
[
{
"file": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/file1.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/notebook1"
}
},
{
"file": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/file2.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/tester@databricks.com/.bundle/nominal/development/files/src/notebook2"
}
}
]
18 changes: 18 additions & 0 deletions acceptance/bundle/paths/nominal/output.txt
@@ -0,0 +1,18 @@

>>> $CLI bundle validate -t development -o json

Exit code: 0

>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: nominal
Target: error
Workspace:
User: tester@databricks.com
Path: /Workspace/Users/tester@databricks.com/.bundle/nominal/error

Found 1 error

Exit code: 1
@@ -4,38 +4,45 @@ resources:
       name: "placeholder"
       tasks:
         - task_key: notebook_example
+          job_cluster_key: default
           notebook_task:
             notebook_path: "this value is overridden"
 
         - task_key: spark_python_example
+          job_cluster_key: default
           spark_python_task:
             python_file: "this value is overridden"
 
         - task_key: dbt_example
+          job_cluster_key: default
           dbt_task:
             project_directory: "this value is overridden"
             commands:
               - "dbt run"
 
         - task_key: sql_example
+          job_cluster_key: default
           sql_task:
             file:
               path: "this value is overridden"
             warehouse_id: cafef00d
 
         - task_key: python_wheel_example
+          job_cluster_key: default
           python_wheel_task:
             package_name: my_package
           libraries:
             - whl: ../dist/wheel1.whl
 
         - task_key: spark_jar_example
+          job_cluster_key: default
           spark_jar_task:
             main_class_name: com.example.Main
           libraries:
             - jar: ../target/jar1.jar
 
         - task_key: for_each_notebook_example
+          job_cluster_key: default
           for_each_task:
             task:
               notebook_task:
@@ -44,5 +51,12 @@ resources:
         - task_key: for_each_spark_python_example
           for_each_task:
             task:
+              job_cluster_key: default
               spark_python_task:
                 python_file: "this value is overridden"
+
+      # Include a job cluster for completeness
+      job_clusters:
+        - job_cluster_key: default
+          new_cluster:
+            spark_version: 15.4.x-scala2.12
10 changes: 10 additions & 0 deletions acceptance/bundle/paths/nominal/script
@@ -0,0 +1,10 @@
errcode trace $CLI bundle validate -t development -o json > output.tmp.json

# Capture job tasks
jq '.resources.jobs.my_job.tasks' output.tmp.json > output.job.json

# Capture pipeline libraries
jq '.resources.pipelines.my_pipeline.libraries' output.tmp.json > output.pipeline.json

# Expect failure for the "error" target
errcode trace $CLI bundle validate -t error
1 change: 1 addition & 0 deletions acceptance/bundle/paths/nominal/script.cleanup
@@ -0,0 +1 @@
rm -f output.tmp.json
@@ -0,0 +1,6 @@
{
"paths": [
"/Workspace/remote/src/file1.py",
"/Workspace/remote/src/file1.py"
]
}