Add test for overriding list variable #2099

Merged 2 commits on Jan 9, 2025
19 changes: 5 additions & 14 deletions acceptance/bundle/variables/complex/databricks.yml
@@ -11,6 +11,7 @@ resources:
         - task_key: test
           job_cluster_key: key
           libraries: ${variables.libraries.value}
+          # specific fields of complex variable are referenced:
           task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}"
 
 variables:
@@ -35,14 +36,6 @@ variables:
       - jar: "/path/to/jar"
       - egg: "/path/to/egg"
       - whl: "/path/to/whl"
-  complexvar:
-    type: complex
-    description: "A complex variable"
-    default:
-      key1: "value1"
-      key2: "value2"
-      key3: "value3"
-
 
 targets:
   default:
@@ -51,15 +44,13 @@
     variables:
       node_type: "Standard_DS3_v3"
       cluster:
+        # complex variables are not merged, so missing variables (policy_id) are not inherited
         spark_version: "14.2.x-scala2.11"
         node_type_id: ${var.node_type}
         num_workers: 4
         spark_conf:
           spark.speculation: false
           spark.databricks.delta.retentionDurationCheck.enabled: false
-      complexvar:
-        type: complex
-        default:
-          key1: "1"
-          key2: "2"
-          key3: "3"
+      libraries:
+        - jar: "/newpath/to/jar"
+        - whl: "/newpath/to/whl"
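
The behavior exercised here: when a target overrides a complex variable, the override replaces the entire value; it is not merged field-by-field (hence the policy_id comment above) or element-by-element. A minimal standalone sketch of the same semantics, with a hypothetical bundle name that is not part of this PR:

bundle:
  name: override-demo  # hypothetical

variables:
  libraries:
    type: complex
    default:
      - jar: "/path/to/jar"
      - egg: "/path/to/egg"

targets:
  dev:
    variables:
      libraries:
        - jar: "/newpath/to/jar"

# With -t dev, ${var.libraries} resolves to the single-entry list above;
# the egg entry from the default is not carried over.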
14 changes: 0 additions & 14 deletions acceptance/bundle/variables/complex/out.default.json
@@ -76,20 +76,6 @@
         "spark_version": "13.2.x-scala2.11"
       }
     },
-    "complexvar": {
-      "default": {
-        "key1": "value1",
-        "key2": "value2",
-        "key3": "value3"
-      },
-      "description": "A complex variable",
-      "type": "complex",
-      "value": {
-        "key1": "value1",
-        "key2": "value2",
-        "key3": "value3"
-      }
-    },
     "libraries": {
       "default": [
         {
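
The out.*.json fixtures contain only the resources and variables subtrees because the test script (below) pipes the validate output through jq's object-construction shorthand. A toy illustration of that projection:

echo '{"resources": 1, "variables": 2, "workspace": 3}' | jq '{resources,variables}'
# '{resources,variables}' is shorthand for {resources: .resources, variables: .variables},
# so everything else (here, workspace) is dropped from the result.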
37 changes: 7 additions & 30 deletions acceptance/bundle/variables/complex/out.dev.json
@@ -32,16 +32,13 @@
             "job_cluster_key": "key",
             "libraries": [
               {
-                "jar": "/path/to/jar"
+                "jar": "/newpath/to/jar"
               },
               {
-                "egg": "/path/to/egg"
-              },
-              {
-                "whl": "/path/to/whl"
+                "whl": "/newpath/to/whl"
               }
             ],
-            "task_key": "task with spark version 14.2.x-scala2.11 and jar /path/to/jar"
+            "task_key": "task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
           }
         ]
       }
@@ -70,43 +67,23 @@
         "spark_version": "14.2.x-scala2.11"
       }
     },
-    "complexvar": {
-      "default": {
-        "key1": "1",
-        "key2": "2",
-        "key3": "3"
-      },
-      "description": "A complex variable",
-      "type": "complex",
-      "value": {
-        "key1": "1",
-        "key2": "2",
-        "key3": "3"
-      }
-    },
     "libraries": {
       "default": [
         {
-          "jar": "/path/to/jar"
-        },
-        {
-          "egg": "/path/to/egg"
+          "jar": "/newpath/to/jar"
         },
         {
-          "whl": "/path/to/whl"
+          "whl": "/newpath/to/whl"
         }
       ],
       "description": "A libraries definition",
       "type": "complex",
       "value": [
         {
-          "jar": "/path/to/jar"
-        },
-        {
-          "egg": "/path/to/egg"
+          "jar": "/newpath/to/jar"
        },
         {
-          "whl": "/path/to/whl"
+          "whl": "/newpath/to/whl"
         }
       ]
     },
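
A quick way to confirm from this fixture that the list was replaced rather than merged: the resolved dev value has no egg entry at all. A hypothetical one-off check, not part of the test itself:

jq '.variables.libraries.value | map(keys[0])' out.dev.json
# -> ["jar", "whl"]   (no "egg" survives the override)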
14 changes: 14 additions & 0 deletions acceptance/bundle/variables/complex/output.txt
@@ -0,0 +1,14 @@
+
+>>> $CLI bundle validate -o json
+
+>>> jq .resources.jobs.my_job.tasks[0].task_key out.default.json
+"task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
+
+>>> $CLI bundle validate -o json -t dev
+
+>>> jq .resources.jobs.my_job.tasks[0].task_key out.dev.json
+"task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
+policy_id and spark_conf.spark_random fields do not exist in dev target:
+
+>>> jq .resources.jobs.my_job.job_clusters[0].new_cluster.policy_id out.dev.json
+null
10 changes: 7 additions & 3 deletions acceptance/bundle/variables/complex/script
@@ -1,4 +1,8 @@
-$CLI bundle validate -o json | jq '{resources,variables}' > out.default.json
+trace $CLI bundle validate -o json | jq '{resources,variables}' > out.default.json
+trace jq .resources.jobs.my_job.tasks[0].task_key out.default.json | grep "task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
 
-# spark.random and policy_id should be empty in this target:
-$CLI bundle validate -o json -t dev | jq '{resources,variables}' > out.dev.json
+trace $CLI bundle validate -o json -t dev | jq '{resources,variables}' > out.dev.json
+trace jq .resources.jobs.my_job.tasks[0].task_key out.dev.json | grep "task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
+
+echo policy_id and spark_conf.spark_random fields do not exist in dev target:
+trace jq .resources.jobs.my_job.job_clusters[0].new_cluster.policy_id out.dev.json | grep null
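
$CLI and trace here are acceptance-harness helpers: $CLI points at the CLI build under test, and trace echoes each command as a ">>> ..." line before running it, which is what produces output.txt above. Outside the harness, the same checks can be approximated with a released CLI; a sketch, assuming a databricks binary on PATH and this bundle directory as the working directory:

# Interpolated task_key in the dev target (expect the /newpath jar):
databricks bundle validate -o json -t dev | jq '.resources.jobs.my_job.tasks[0].task_key'

# policy_id is not inherited from the default target, so it resolves to null:
databricks bundle validate -o json -t dev | jq '.resources.jobs.my_job.job_clusters[0].new_cluster.policy_id'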