Skip to content

Commit 3812c9a

Browse files
authored
Sort job tasks by task key (#3212)
## Changes Sort job tasks by task key. ## Why This is what the terraform provider does before deployment. Doing so earlier exposes this change to the user in bundle validate and makes it possible to match requests made by TF and direct backends. https://github.com/databricks/terraform-provider-databricks/blob/0a932c2/jobs/resource_job.go#L343 ## Tests New tests enabled on direct backend.
1 parent 4000e8c commit 3812c9a

13 files changed

Lines changed: 113 additions & 98 deletions

File tree

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,5 +14,6 @@
1414

1515
### Bundles
1616
* Fix default search location for whl artifacts ([#3184](https://github.com/databricks/cli/pull/3184)). This was a regression introduced in 0.255.0.
17+
* The job tasks are now sorted by task key in "bundle validate/summary" output ([#3212](https://github.com/databricks/cli/pull/3212))
1718

1819
### API Changes

acceptance/bundle/artifacts/whl_dynamic/out.test.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@ Local = true
22
Cloud = false
33

44
[EnvMatrix]
5-
DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
5+
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]

acceptance/bundle/artifacts/whl_dynamic/test.toml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"] # need to sort tasks by key like terraform does
2-
31
[[Repls]]
42
Old = '\\\\'
53
New = '/'

acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/out.test.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@ Local = true
22
Cloud = false
33

44
[EnvMatrix]
5-
DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
5+
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1 @@
11
BundleConfig.default_name = ""
2-
EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"] # need to sort tasks by key

acceptance/bundle/paths/fallback/output.job.json

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,4 @@
11
[
2-
{
3-
"job_cluster_key": "default",
4-
"notebook_task": {
5-
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook"
6-
},
7-
"task_key": "notebook_example"
8-
},
9-
{
10-
"job_cluster_key": "default",
11-
"spark_python_task": {
12-
"python_file": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file.py"
13-
},
14-
"task_key": "spark_python_example"
15-
},
162
{
173
"dbt_task": {
184
"commands": [
@@ -26,13 +12,10 @@
2612
},
2713
{
2814
"job_cluster_key": "default",
29-
"sql_task": {
30-
"file": {
31-
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/sql.sql"
32-
},
33-
"warehouse_id": "cafef00d"
15+
"notebook_task": {
16+
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook"
3417
},
35-
"task_key": "sql_example"
18+
"task_key": "notebook_example"
3619
},
3720
{
3821
"job_cluster_key": "default",
@@ -63,5 +46,22 @@
6346
"main_class_name": "com.example.Main"
6447
},
6548
"task_key": "spark_jar_example"
49+
},
50+
{
51+
"job_cluster_key": "default",
52+
"spark_python_task": {
53+
"python_file": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file.py"
54+
},
55+
"task_key": "spark_python_example"
56+
},
57+
{
58+
"job_cluster_key": "default",
59+
"sql_task": {
60+
"file": {
61+
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/sql.sql"
62+
},
63+
"warehouse_id": "cafef00d"
64+
},
65+
"task_key": "sql_example"
6666
}
6767
]
Lines changed: 36 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,4 @@
11
[
2-
{
3-
"job_cluster_key": "default",
4-
"notebook_task": {
5-
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
6-
},
7-
"task_key": "notebook_example"
8-
},
9-
{
10-
"job_cluster_key": "default",
11-
"spark_python_task": {
12-
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
13-
},
14-
"task_key": "spark_python_example"
15-
},
162
{
173
"dbt_task": {
184
"commands": [
@@ -25,14 +11,33 @@
2511
"task_key": "dbt_example"
2612
},
2713
{
14+
"for_each_task": {
15+
"task": {
16+
"notebook_task": {
17+
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
18+
}
19+
}
20+
},
2821
"job_cluster_key": "default",
29-
"sql_task": {
30-
"file": {
31-
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/sql.sql"
32-
},
33-
"warehouse_id": "cafef00d"
22+
"task_key": "for_each_notebook_example"
23+
},
24+
{
25+
"for_each_task": {
26+
"task": {
27+
"job_cluster_key": "default",
28+
"spark_python_task": {
29+
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
30+
}
31+
}
3432
},
35-
"task_key": "sql_example"
33+
"task_key": "for_each_spark_python_example"
34+
},
35+
{
36+
"job_cluster_key": "default",
37+
"notebook_task": {
38+
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
39+
},
40+
"task_key": "notebook_example"
3641
},
3742
{
3843
"job_cluster_key": "default",
@@ -65,25 +70,20 @@
6570
"task_key": "spark_jar_example"
6671
},
6772
{
68-
"for_each_task": {
69-
"task": {
70-
"notebook_task": {
71-
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
72-
}
73-
}
74-
},
7573
"job_cluster_key": "default",
76-
"task_key": "for_each_notebook_example"
74+
"spark_python_task": {
75+
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
76+
},
77+
"task_key": "spark_python_example"
7778
},
7879
{
79-
"for_each_task": {
80-
"task": {
81-
"job_cluster_key": "default",
82-
"spark_python_task": {
83-
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
84-
}
85-
}
80+
"job_cluster_key": "default",
81+
"sql_task": {
82+
"file": {
83+
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/sql.sql"
84+
},
85+
"warehouse_id": "cafef00d"
8686
},
87-
"task_key": "for_each_spark_python_example"
87+
"task_key": "sql_example"
8888
}
8989
]

acceptance/bundle/templates/default-python/integration_classic/out.validate.dev.json

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -75,24 +75,6 @@
7575
"dev": "[USERNAME]"
7676
},
7777
"tasks": [
78-
{
79-
"job_cluster_key": "job_cluster",
80-
"notebook_task": {
81-
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/notebook"
82-
},
83-
"task_key": "notebook_task"
84-
},
85-
{
86-
"depends_on": [
87-
{
88-
"task_key": "notebook_task"
89-
}
90-
],
91-
"pipeline_task": {
92-
"pipeline_id": "${resources.pipelines.project_name_[UNIQUE_NAME]_pipeline.id}"
93-
},
94-
"task_key": "refresh_pipeline"
95-
},
9678
{
9779
"depends_on": [
9880
{
@@ -110,6 +92,24 @@
11092
"package_name": "project_name_[UNIQUE_NAME]"
11193
},
11294
"task_key": "main_task"
95+
},
96+
{
97+
"job_cluster_key": "job_cluster",
98+
"notebook_task": {
99+
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/notebook"
100+
},
101+
"task_key": "notebook_task"
102+
},
103+
{
104+
"depends_on": [
105+
{
106+
"task_key": "notebook_task"
107+
}
108+
],
109+
"pipeline_task": {
110+
"pipeline_id": "${resources.pipelines.project_name_[UNIQUE_NAME]_pipeline.id}"
111+
},
112+
"task_key": "refresh_pipeline"
113113
}
114114
],
115115
"trigger": {

acceptance/bundle/templates/default-python/integration_classic/output.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -177,7 +177,7 @@ Validation OK!
177177
- "dev": "[USERNAME]"
178178
},
179179
"tasks": [
180-
@@ -79,5 +66,5 @@
180+
@@ -97,5 +84,5 @@
181181
"job_cluster_key": "job_cluster",
182182
"notebook_task": {
183183
- "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/notebook"
@@ -335,7 +335,7 @@ Resources:
335335
- "dev": "[USERNAME]"
336336
},
337337
"tasks": [
338-
@@ -80,5 +67,5 @@
338+
@@ -98,5 +85,5 @@
339339
"job_cluster_key": "job_cluster",
340340
"notebook_task": {
341341
- "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/notebook"

acceptance/bundle/templates/experimental-jobs-as-code/output.txt

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -46,13 +46,6 @@ Warning: Ignoring Databricks CLI version constraint for development build. Requi
4646
"dev": "[USERNAME]"
4747
},
4848
"tasks": [
49-
{
50-
"job_cluster_key": "job_cluster",
51-
"notebook_task": {
52-
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/files/src/notebook"
53-
},
54-
"task_key": "notebook_task"
55-
},
5649
{
5750
"depends_on": [
5851
{
@@ -70,6 +63,13 @@ Warning: Ignoring Databricks CLI version constraint for development build. Requi
7063
"package_name": "my_jobs_as_code"
7164
},
7265
"task_key": "main_task"
66+
},
67+
{
68+
"job_cluster_key": "job_cluster",
69+
"notebook_task": {
70+
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/files/src/notebook"
71+
},
72+
"task_key": "notebook_task"
7373
}
7474
],
7575
"trigger": {

0 commit comments

Comments
 (0)