Skip to content

Commit 1c839f1

Browse files
authored
Convert TestAbortBind to an acceptance test (#3116)
## Why One change in the series of changes for converting integration tests into acceptance tests. This will allow for easier testing of various backing solutions for bundle deployment. This is a follow-up of the closed, unmerged PR #3111.
1 parent 8d43117 commit 1c839f1

4 files changed

Lines changed: 84 additions & 45 deletions

File tree

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
2+
=== Create a pre-defined job:
3+
Created job with ID: [NUMID]
4+
5+
=== Expect binding to fail without an auto-approve flag:
6+
Error: failed to bind the resource, err: This bind operation requires user confirmation, but the current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed.
7+
8+
=== Deploy bundle:
9+
>>> [CLI] bundle deploy --force-lock
10+
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bind-job-[UNIQUE_NAME]/files...
11+
Deploying resources...
12+
Updating deployment state...
13+
Deployment complete!
14+
15+
=== Check that job is not bound and not updated with config from bundle:
16+
>>> [CLI] jobs get [NUMID]
17+
{
18+
"job_id": [NUMID],
19+
"settings": {
20+
"name": "test-unbound-job-[UNIQUE_NAME]",
21+
"tasks": [
22+
{
23+
"task_key": "my_notebook_task",
24+
"spark_python_task": {
25+
"python_file": "/Workspace/Users/[USERNAME]/initial_hello_world.py"
26+
}
27+
}
28+
]
29+
}
30+
}
31+
32+
=== Delete the pre-defined job [NUMID]:0
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
cp -r $TESTDIR/../job-spark-python-task/{databricks.yml.tmpl,hello_world.py} .
2+
3+
title "Create a pre-defined job:\n"
4+
5+
PYTHON_FILE="/Workspace/Users/${CURRENT_USER_NAME}/initial_hello_world.py"
6+
7+
JOB_ID=$($CLI jobs create --json '
8+
{
9+
"name": "test-unbound-job-'${UNIQUE_NAME}'",
10+
"tasks": [
11+
{
12+
"task_key": "my_notebook_task",
13+
"new_cluster": {
14+
"spark_version": "'${DEFAULT_SPARK_VERSION}'",
15+
"node_type_id": "'${NODE_TYPE_ID}'",
16+
"num_workers": 1
17+
},
18+
"spark_python_task": {
19+
"python_file": "'${PYTHON_FILE}'"
20+
}
21+
}
22+
]
23+
}' | jq -r '.job_id')
24+
25+
echo "Created job with ID: $JOB_ID"
26+
27+
envsubst < databricks.yml.tmpl > databricks.yml
28+
29+
cleanup() {
30+
title "Delete the pre-defined job $JOB_ID:"
31+
$CLI jobs delete $JOB_ID
32+
echo $?
33+
}
34+
trap cleanup EXIT
35+
36+
title "Expect binding to fail without an auto-approve flag:\n"
37+
trace errcode $CLI bundle deployment bind foo $JOB_ID &> out.bind-result.txt
38+
grep "^Error:" out.bind-result.txt
39+
rm out.bind-result.txt
40+
41+
title "Deploy bundle:"
42+
trace $CLI bundle deploy --force-lock
43+
44+
title "Check that job is not bound and not updated with config from bundle:"
45+
trace $CLI jobs get $JOB_ID | jq '{job_id, settings: {name: .settings.name, tasks: [.settings.tasks[] | {task_key, spark_python_task: .spark_python_task}]}}'
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
Local = true
2+
Cloud = true
3+
4+
Ignore = [
5+
"databricks.yml",
6+
"hello_world.py"
7+
]

integration/bundle/bind_resource_test.go

Lines changed: 0 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -8,58 +8,13 @@ import (
88

99
"github.com/databricks/cli/integration/internal/acc"
1010
"github.com/databricks/cli/internal/testcli"
11-
"github.com/databricks/cli/internal/testutil"
1211
"github.com/databricks/cli/libs/env"
1312
"github.com/databricks/databricks-sdk-go"
1413
"github.com/databricks/databricks-sdk-go/service/jobs"
1514
"github.com/google/uuid"
16-
"github.com/stretchr/testify/assert"
1715
"github.com/stretchr/testify/require"
1816
)
1917

20-
func TestAbortBind(t *testing.T) {
21-
ctx, wt := acc.WorkspaceTest(t)
22-
gt := &generateJobTest{T: wt, w: wt.W}
23-
24-
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
25-
uniqueId := uuid.New().String()
26-
bundleRoot := initTestTemplate(t, ctx, "basic", map[string]any{
27-
"unique_id": uniqueId,
28-
"spark_version": "13.3.x-scala2.12",
29-
"node_type_id": nodeTypeId,
30-
})
31-
32-
jobId := gt.createTestJob(ctx)
33-
t.Cleanup(func() {
34-
gt.destroyJob(ctx, jobId)
35-
destroyBundle(t, ctx, bundleRoot)
36-
})
37-
38-
// Bind should fail because prompting is not possible.
39-
ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot)
40-
ctx = env.Set(ctx, "TERM", "dumb")
41-
c := testcli.NewRunner(t, ctx, "bundle", "deployment", "bind", "foo", strconv.FormatInt(jobId, 10))
42-
43-
// Expect error suggesting to use --auto-approve
44-
_, _, err := c.Run()
45-
assert.ErrorContains(t, err, "failed to bind the resource")
46-
assert.ErrorContains(t, err, "This bind operation requires user confirmation, but the current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
47-
48-
deployBundle(t, ctx, bundleRoot)
49-
50-
w, err := databricks.NewWorkspaceClient()
51-
require.NoError(t, err)
52-
53-
// Check that job is not bound and not updated with config from bundle
54-
job, err := w.Jobs.Get(ctx, jobs.GetJobRequest{
55-
JobId: jobId,
56-
})
57-
require.NoError(t, err)
58-
59-
require.NotEqual(t, job.Settings.Name, "test-job-basic-"+uniqueId)
60-
require.Contains(t, job.Settings.Tasks[0].NotebookTask.NotebookPath, "test")
61-
}
62-
6318
func TestGenerateAndBind(t *testing.T) {
6419
ctx, wt := acc.WorkspaceTest(t)
6520
gt := &generateJobTest{T: wt, w: wt.W}

0 commit comments

Comments
 (0)