From 88ff42658e8edd9875c29980d9611a69f4f551dd Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 13:22:41 +0100 Subject: [PATCH 01/17] Bump Go SDK to v0.126.0 and remove redundant agent tracking from CLI The Go SDK now includes built-in AI agent detection, appending agent/ to the User-Agent header when running inside a known agent environment. This removes the CLI's own agent-to-UA wiring which would produce duplicate agent/ segments. Hand-written changes: - Remove withAgentInUserAgent() from root command (cmd/root/user_agent_agent.go) - Remove agent.Detect(ctx) from Execute() and delete libs/agent/ (dead code) - Add HTTP-level tests verifying SDK agent detection produces exactly one agent/ segment in the User-Agent header - Update auth prompt logic to use cfg.ConfigType() instead of ErrNotAccountClient/ErrNotWorkspaceClient (the SDK no longer returns these errors after removing host-type validation in favor of host metadata resolution) - Add .well-known/databricks-config handlers to labs test mock servers Generated changes from make generate with updated OpenAPI SHA. 
Co-authored-by: Isaac --- .codegen/_openapi_sha | 2 +- .gitattributes | 1 + .github/workflows/tagging.yml | 4 +- acceptance/bundle/refschema/out.fields.txt | 21 + .../internal/schema/annotations_openapi.yml | 82 +- .../schema/annotations_openapi_overrides.yml | 4 + .../validation/generated/enum_fields.go | 11 +- .../validation/generated/required_fields.go | 1 + bundle/schema/jsonschema.json | 140 ++- bundle/schema/jsonschema_for_docs.json | 101 ++- cmd/labs/project/installer_test.go | 16 + cmd/root/auth.go | 35 +- cmd/root/auth_test.go | 21 +- cmd/root/root.go | 5 - cmd/root/user_agent_agent.go | 27 - cmd/root/user_agent_agent_test.go | 117 ++- cmd/workspace/cmd.go | 2 + .../data-classification.go | 5 +- cmd/workspace/environments/environments.go | 799 ++++++++++++++++++ .../feature-engineering.go | 44 +- cmd/workspace/genie/genie.go | 15 - cmd/workspace/groups.go | 4 + cmd/workspace/postgres/postgres.go | 154 +++- .../serving-endpoints/serving-endpoints.go | 1 + cmd/workspace/warehouses/warehouses.go | 4 +- go.mod | 2 +- go.sum | 4 +- internal/genkit/tagging.py | 0 libs/agent/agent.go | 88 -- libs/agent/agent_test.go | 69 -- python/databricks/bundles/jobs/__init__.py | 16 + .../bundles/jobs/_models/alert_task.py | 80 ++ .../jobs/_models/alert_task_subscriber.py | 45 + .../bundles/jobs/_models/environment.py | 6 +- .../bundles/jobs/_models/git_source.py | 6 - .../bundles/jobs/_models/sparse_checkout.py | 4 +- .../databricks/bundles/jobs/_models/task.py | 11 + .../databricks/bundles/pipelines/__init__.py | 14 + .../pipelines/_models/connector_type.py | 18 + .../pipelines/_models/data_staging_options.py | 70 ++ .../pipelines/_models/ingestion_config.py | 5 +- .../_models/ingestion_pipeline_definition.py | 42 + 42 files changed, 1750 insertions(+), 346 deletions(-) mode change 100644 => 100755 .github/workflows/tagging.yml delete mode 100644 cmd/root/user_agent_agent.go create mode 100755 cmd/workspace/environments/environments.go mode change 100644 => 100755 
internal/genkit/tagging.py delete mode 100644 libs/agent/agent.go delete mode 100644 libs/agent/agent_test.go create mode 100644 python/databricks/bundles/jobs/_models/alert_task.py create mode 100644 python/databricks/bundles/jobs/_models/alert_task_subscriber.py create mode 100644 python/databricks/bundles/pipelines/_models/connector_type.py create mode 100644 python/databricks/bundles/pipelines/_models/data_staging_options.py diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ddc7b70897..e5a9037912 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -a7c320a6b531263c8fa45619c1565b63849750e5 \ No newline at end of file +d09dbd77f5a9560cbb816746773da43a8bdbde08 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 7cb16bf826..47d09387cc 100644 --- a/.gitattributes +++ b/.gitattributes @@ -90,6 +90,7 @@ cmd/workspace/enable-notebook-table-clipboard/enable-notebook-table-clipboard.go cmd/workspace/enable-results-downloading/enable-results-downloading.go linguist-generated=true cmd/workspace/enhanced-security-monitoring/enhanced-security-monitoring.go linguist-generated=true cmd/workspace/entity-tag-assignments/entity-tag-assignments.go linguist-generated=true +cmd/workspace/environments/environments.go linguist-generated=true cmd/workspace/experiments/experiments.go linguist-generated=true cmd/workspace/external-lineage/external-lineage.go linguist-generated=true cmd/workspace/external-locations/external-locations.go linguist-generated=true diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml old mode 100644 new mode 100755 index 0752107ac9..9bca7399f3 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -34,13 +34,13 @@ jobs: steps: - name: Generate GitHub App Token id: generate-token - uses: actions/create-github-app-token@v3 + uses: actions/create-github-app-token@v2 with: app-id: ${{ secrets.DECO_SDK_TAGGING_APP_ID }} private-key: ${{ 
secrets.DECO_SDK_TAGGING_PRIVATE_KEY }} - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v4 with: fetch-depth: 0 token: ${{ steps.generate-token.outputs.token }} diff --git a/acceptance/bundle/refschema/out.fields.txt b/acceptance/bundle/refschema/out.fields.txt index 97dfa44414..d032aa48f8 100644 --- a/acceptance/bundle/refschema/out.fields.txt +++ b/acceptance/bundle/refschema/out.fields.txt @@ -890,6 +890,14 @@ resources.jobs.*.tags map[string]string ALL resources.jobs.*.tags.* string ALL resources.jobs.*.tasks []jobs.Task ALL resources.jobs.*.tasks[*] jobs.Task ALL +resources.jobs.*.tasks[*].alert_task *jobs.AlertTask ALL +resources.jobs.*.tasks[*].alert_task.alert_id string ALL +resources.jobs.*.tasks[*].alert_task.subscribers []jobs.AlertTaskSubscriber ALL +resources.jobs.*.tasks[*].alert_task.subscribers[*] jobs.AlertTaskSubscriber ALL +resources.jobs.*.tasks[*].alert_task.subscribers[*].destination_id string ALL +resources.jobs.*.tasks[*].alert_task.subscribers[*].user_name string ALL +resources.jobs.*.tasks[*].alert_task.warehouse_id string ALL +resources.jobs.*.tasks[*].alert_task.workspace_path string ALL resources.jobs.*.tasks[*].clean_rooms_notebook_task *jobs.CleanRoomsNotebookTask ALL resources.jobs.*.tasks[*].clean_rooms_notebook_task.clean_room_name string ALL resources.jobs.*.tasks[*].clean_rooms_notebook_task.etag string ALL @@ -954,6 +962,14 @@ resources.jobs.*.tasks[*].for_each_task *jobs.ForEachTask ALL resources.jobs.*.tasks[*].for_each_task.concurrency int ALL resources.jobs.*.tasks[*].for_each_task.inputs string ALL resources.jobs.*.tasks[*].for_each_task.task jobs.Task ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task *jobs.AlertTask ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task.alert_id string ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task.subscribers []jobs.AlertTaskSubscriber ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task.subscribers[*] 
jobs.AlertTaskSubscriber ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task.subscribers[*].destination_id string ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task.subscribers[*].user_name string ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task.warehouse_id string ALL +resources.jobs.*.tasks[*].for_each_task.task.alert_task.workspace_path string ALL resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task *jobs.CleanRoomsNotebookTask ALL resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task.clean_room_name string ALL resources.jobs.*.tasks[*].for_each_task.task.clean_rooms_notebook_task.etag string ALL @@ -2228,6 +2244,11 @@ resources.pipelines.*.health pipelines.GetPipelineResponseHealth REMOTE resources.pipelines.*.id string ALL resources.pipelines.*.ingestion_definition *pipelines.IngestionPipelineDefinition ALL resources.pipelines.*.ingestion_definition.connection_name string ALL +resources.pipelines.*.ingestion_definition.connector_type pipelines.ConnectorType ALL +resources.pipelines.*.ingestion_definition.data_staging_options *pipelines.DataStagingOptions ALL +resources.pipelines.*.ingestion_definition.data_staging_options.catalog_name string ALL +resources.pipelines.*.ingestion_definition.data_staging_options.schema_name string ALL +resources.pipelines.*.ingestion_definition.data_staging_options.volume_name string ALL resources.pipelines.*.ingestion_definition.full_refresh_window *pipelines.OperationTimeWindow ALL resources.pipelines.*.ingestion_definition.full_refresh_window.days_of_week []pipelines.DayOfWeek ALL resources.pipelines.*.ingestion_definition.full_refresh_window.days_of_week[*] pipelines.DayOfWeek ALL diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml index a7dc71a8fb..c196c7799a 100644 --- a/bundle/internal/schema/annotations_openapi.yml +++ b/bundle/internal/schema/annotations_openapi.yml @@ -2346,7 +2346,7 @@ 
github.com/databricks/databricks-sdk-go/service/compute.Environment: "_": "description": |- The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. - In this minimal environment spec, only pip dependencies are supported. + In this minimal environment spec, only pip and java dependencies are supported. "base_environment": "description": |- The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup. @@ -2363,7 +2363,7 @@ github.com/databricks/databricks-sdk-go/service/compute.Environment: Allowed dependencies include a requirement specifier, an archive URL, a local project path (such as WSFS or UC Volumes in Databricks), or a VCS project URL. "environment_version": "description": |- - Required. Environment version used by the environment. + Either `environment_version` or `base_environment` needs to be provided. Environment version used by the environment. Each version comes with a specific Python version and a set of Python packages. The version is a string, consisting of an integer. "java_dependencies": @@ -3047,6 +3047,34 @@ github.com/databricks/databricks-sdk-go/service/iam.PermissionLevel: CAN_CREATE - |- CAN_MONITOR_ONLY + - |- + CAN_CREATE_APP +github.com/databricks/databricks-sdk-go/service/jobs.AlertTask: + "alert_id": + "description": |- + The alert_id is the canonical identifier of the alert. + "subscribers": + "description": |- + The subscribers receive alert evaluation result notifications after the alert task is completed. + The number of subscriptions is limited to 100. + "warehouse_id": + "description": |- + The warehouse_id identifies the warehouse settings used by the alert task. + "workspace_path": + "description": |- + The workspace_path is the path to the alert file in the workspace. 
The path: + * must start with "/Workspace" + * must be a normalized path. + User has to select only one of alert_id or workspace_path to identify the alert. +github.com/databricks/databricks-sdk-go/service/jobs.AlertTaskSubscriber: + "_": + "description": |- + Represents a subscriber that will receive alert notifications. + A subscriber can be either a user (via email) or a notification destination (via destination_id). + "destination_id": {} + "user_name": + "description": |- + A valid workspace email address. github.com/databricks/databricks-sdk-go/service/jobs.AuthenticationMethod: "_": "enum": @@ -3331,9 +3359,7 @@ github.com/databricks/databricks-sdk-go/service/jobs.GitSource: This field is deprecated "x-databricks-preview": |- PRIVATE - "sparse_checkout": - "x-databricks-preview": |- - PRIVATE + "sparse_checkout": {} github.com/databricks/databricks-sdk-go/service/jobs.JobCluster: "job_cluster_key": "description": |- @@ -3405,7 +3431,7 @@ github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment: "spec": "description": |- The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. - In this minimal environment spec, only pip dependencies are supported. + In this minimal environment spec, only pip and java dependencies are supported. github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings: "no_alert_for_canceled_runs": "description": |- @@ -3990,6 +4016,9 @@ github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfigura and can be used to wait for a series of table updates before triggering a run. The minimum allowed value is 60 seconds. 
github.com/databricks/databricks-sdk-go/service/jobs.Task: + "alert_task": + "description": |- + New alert v2 task "clean_rooms_notebook_task": "description": |- The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook @@ -4281,9 +4310,37 @@ github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters: For Oracle databases, this maps to a service name. "x-databricks-preview": |- PRIVATE +github.com/databricks/databricks-sdk-go/service/pipelines.ConnectorType: + "_": + "description": |- + For certain database sources LakeFlow Connect offers both query based and cdc + ingestion, ConnectorType can bse used to convey the type of ingestion. + If connection_name is provided for database sources, we default to Query Based ingestion + "enum": + - |- + CDC + - |- + QUERY_BASED github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger: "quartz_cron_schedule": {} "timezone_id": {} +github.com/databricks/databricks-sdk-go/service/pipelines.DataStagingOptions: + "_": + "description": |- + Location of staged data storage + "catalog_name": + "description": |- + (Required, Immutable) The name of the catalog for the connector's staging storage location. + "schema_name": + "description": |- + (Required, Immutable) The name of the schema for the connector's staging storage location. + "volume_name": + "description": |- + (Optional) The Unity Catalog-compatible name for the storage location. + This is the volume to use for the data that is extracted by the connector. + Spark Declarative Pipelines system will automatically create the volume under the catalog and schema. + For Combined Cdc Managed Ingestion pipelines default name for the volume would be : + __databricks_ingestion_gateway_staging_data-$pipelineId github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek: "_": "description": |- @@ -4382,6 +4439,19 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefin pipeline. 
Under certain conditions, this can be replaced with ingestion_gateway_id to change the connector to Cdc Managed Ingestion Pipeline with Gateway pipeline. + "connector_type": + "description": |- + (Optional) Connector Type for sources. Ex: CDC, Query Based. + "x-databricks-preview": |- + PRIVATE + "data_staging_options": + "description": |- + (Optional) Location of staged data storage. This is required for migration from Cdc Managed Ingestion Pipeline + with Gateway pipeline to Combined Cdc Managed Ingestion Pipeline. + If not specified, the volume for staged data will be created in catalog and schema/target specified in the + top level pipeline definition. + "x-databricks-preview": |- + PRIVATE "full_refresh_window": "description": |- (Optional) A window that specifies a set of time ranges for snapshot queries in CDC. diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index f492ddf88f..611289083e 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -874,6 +874,10 @@ github.com/databricks/databricks-sdk-go/service/database.SyncedTablePosition: "delta_table_sync_info": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/jobs.AlertTaskSubscriber: + "destination_id": + "description": |- + PLACEHOLDER github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask: "dashboard_id": "description": |- diff --git a/bundle/internal/validation/generated/enum_fields.go b/bundle/internal/validation/generated/enum_fields.go index 81cf22dde8..c1e098ed80 100644 --- a/bundle/internal/validation/generated/enum_fields.go +++ b/bundle/internal/validation/generated/enum_fields.go @@ -8,7 +8,7 @@ var EnumFields = map[string][]string{ "artifacts.*.executable": {"bash", "sh", "cmd"}, "artifacts.*.type": {"whl", "jar"}, - "permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_EDIT", 
"CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, + "permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_CREATE_APP", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, "resources.alerts.*.evaluation.comparison_operator": {"EQUAL", "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "IS_NOT_NULL", "IS_NULL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "NOT_EQUAL"}, "resources.alerts.*.evaluation.empty_result_state": {"ERROR", "OK", "TRIGGERED", "UNKNOWN"}, @@ -16,7 +16,7 @@ var EnumFields = map[string][]string{ "resources.alerts.*.evaluation.state": {"ERROR", "OK", "TRIGGERED", "UNKNOWN"}, "resources.alerts.*.evaluation.threshold.column.aggregation": {"AVG", "COUNT", "COUNT_DISTINCT", "MAX", "MEDIAN", "MIN", "STDDEV", "SUM"}, "resources.alerts.*.lifecycle_state": {"ACTIVE", "DELETED"}, - "resources.alerts.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, + "resources.alerts.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_CREATE_APP", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, 
"resources.alerts.*.schedule.pause_status": {"PAUSED", "UNPAUSED"}, "resources.apps.*.active_deployment.mode": {"AUTO_SYNC", "SNAPSHOT"}, @@ -50,9 +50,9 @@ var EnumFields = map[string][]string{ "resources.clusters.*.runtime_engine": {"NULL", "PHOTON", "STANDARD"}, "resources.dashboards.*.lifecycle_state": {"ACTIVE", "TRASHED"}, - "resources.dashboards.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, + "resources.dashboards.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_CREATE_APP", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, - "resources.database_instances.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, + "resources.database_instances.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_CREATE_APP", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, "resources.database_instances.*.state": {"AVAILABLE", "DELETING", "FAILING_OVER", "STARTING", "STOPPED", "UPDATING"}, 
"resources.experiments.*.permissions[*].level": {"CAN_EDIT", "CAN_MANAGE", "CAN_READ"}, @@ -144,6 +144,7 @@ var EnumFields = map[string][]string{ "resources.pipelines.*.clusters[*].azure_attributes.availability": {"ON_DEMAND_AZURE", "SPOT_AZURE", "SPOT_WITH_FALLBACK_AZURE"}, "resources.pipelines.*.clusters[*].gcp_attributes.availability": {"ON_DEMAND_GCP", "PREEMPTIBLE_GCP", "PREEMPTIBLE_WITH_FALLBACK_GCP"}, "resources.pipelines.*.deployment.kind": {"BUNDLE"}, + "resources.pipelines.*.ingestion_definition.connector_type": {"CDC", "QUERY_BASED"}, "resources.pipelines.*.ingestion_definition.full_refresh_window.days_of_week[*]": {"FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"}, "resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.scd_type": {"APPEND_ONLY", "SCD_TYPE_1", "SCD_TYPE_2"}, "resources.pipelines.*.ingestion_definition.objects[*].schema.table_configuration.scd_type": {"APPEND_ONLY", "SCD_TYPE_1", "SCD_TYPE_2"}, @@ -155,7 +156,7 @@ var EnumFields = map[string][]string{ "resources.postgres_endpoints.*.endpoint_type": {"ENDPOINT_TYPE_READ_ONLY", "ENDPOINT_TYPE_READ_WRITE"}, - "resources.postgres_projects.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, + "resources.postgres_projects.*.permissions[*].level": {"CAN_ATTACH_TO", "CAN_BIND", "CAN_CREATE", "CAN_CREATE_APP", "CAN_EDIT", "CAN_EDIT_METADATA", "CAN_MANAGE", "CAN_MANAGE_PRODUCTION_VERSIONS", "CAN_MANAGE_RUN", "CAN_MANAGE_STAGING_VERSIONS", "CAN_MONITOR", "CAN_MONITOR_ONLY", "CAN_QUERY", "CAN_READ", "CAN_RESTART", "CAN_RUN", "CAN_USE", "CAN_VIEW", "CAN_VIEW_METADATA", "IS_OWNER"}, "resources.quality_monitors.*.custom_metrics[*].type": 
{"CUSTOM_METRIC_TYPE_AGGREGATE", "CUSTOM_METRIC_TYPE_DERIVED", "CUSTOM_METRIC_TYPE_DRIFT"}, "resources.quality_monitors.*.inference_log.problem_type": {"PROBLEM_TYPE_CLASSIFICATION", "PROBLEM_TYPE_REGRESSION"}, diff --git a/bundle/internal/validation/generated/required_fields.go b/bundle/internal/validation/generated/required_fields.go index 7505453f8e..d90345f83f 100644 --- a/bundle/internal/validation/generated/required_fields.go +++ b/bundle/internal/validation/generated/required_fields.go @@ -201,6 +201,7 @@ var RequiredFields = map[string][]string{ "resources.pipelines.*.clusters[*].init_scripts[*].workspace": {"destination"}, "resources.pipelines.*.deployment": {"kind"}, "resources.pipelines.*.gateway_definition": {"connection_name", "gateway_storage_catalog", "gateway_storage_schema"}, + "resources.pipelines.*.ingestion_definition.data_staging_options": {"catalog_name", "schema_name"}, "resources.pipelines.*.ingestion_definition.full_refresh_window": {"start_hour"}, "resources.pipelines.*.ingestion_definition.objects[*].report": {"destination_catalog", "destination_schema", "source_url"}, "resources.pipelines.*.ingestion_definition.objects[*].report.table_configuration.auto_full_refresh_policy": {"enabled"}, diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index a34b3d231e..abfdb55233 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -4563,7 +4563,7 @@ "oneOf": [ { "type": "object", - "description": "The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines.\nIn this minimal environment spec, only pip dependencies are supported.", + "description": "The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines.\nIn this minimal environment spec, only pip and java dependencies 
are supported.", "properties": { "base_environment": { "description": "The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup.\nThis `env.yaml` file may itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, when used as a base environment, `env_1.yaml` (or further nested references) will not be processed or included in the final environment, meaning that the resolution of `base_environment` references is not recursive.", @@ -4580,7 +4580,7 @@ "$ref": "#/$defs/slice/string" }, "environment_version": { - "description": "Required. Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.", + "description": "Either `environment_version` or `base_environment` needs to be provided. Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.", "$ref": "#/$defs/string" }, "java_dependencies": { @@ -5439,7 +5439,8 @@ "CAN_QUERY", "CAN_MONITOR", "CAN_CREATE", - "CAN_MONITOR_ONLY" + "CAN_MONITOR_ONLY", + "CAN_CREATE_APP" ] }, { @@ -5448,6 +5449,58 @@ } ] }, + "jobs.AlertTask": { + "oneOf": [ + { + "type": "object", + "properties": { + "alert_id": { + "description": "The alert_id is the canonical identifier of the alert.", + "$ref": "#/$defs/string" + }, + "subscribers": { + "description": "The subscribers receive alert evaluation result notifications after the alert task is completed.\nThe number of subscriptions is limited to 100.", + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.AlertTaskSubscriber" + }, + "warehouse_id": { + "description": "The warehouse_id identifies the warehouse settings used by the alert task.", + "$ref": "#/$defs/string" + }, + 
"workspace_path": { + "description": "The workspace_path is the path to the alert file in the workspace. The path:\n* must start with \"/Workspace\"\n* must be a normalized path.\nUser has to select only one of alert_id or workspace_path to identify the alert.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "jobs.AlertTaskSubscriber": { + "oneOf": [ + { + "type": "object", + "description": "Represents a subscriber that will receive alert notifications.\nA subscriber can be either a user (via email) or a notification destination (via destination_id).", + "properties": { + "destination_id": { + "$ref": "#/$defs/string" + }, + "user_name": { + "description": "A valid workspace email address.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "jobs.AuthenticationMethod": { "oneOf": [ { @@ -5973,9 +6026,7 @@ "$ref": "#/$defs/string" }, "sparse_checkout": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparseCheckout", - "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparseCheckout" } }, "additionalProperties": false, @@ -7204,6 +7255,10 @@ { "type": "object", "properties": { + "alert_task": { + "description": "New alert v2 task", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AlertTask" + }, "clean_rooms_notebook_task": { "description": "The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask" @@ -7696,6 +7751,22 @@ } ] }, + "pipelines.ConnectorType": { + "oneOf": [ + { + 
"type": "string", + "description": "For certain database sources LakeFlow Connect offers both query based and cdc\ningestion, ConnectorType can bse used to convey the type of ingestion.\nIf connection_name is provided for database sources, we default to Query Based ingestion", + "enum": [ + "CDC", + "QUERY_BASED" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "pipelines.CronTrigger": { "oneOf": [ { @@ -7716,6 +7787,37 @@ } ] }, + "pipelines.DataStagingOptions": { + "oneOf": [ + { + "type": "object", + "description": "Location of staged data storage", + "properties": { + "catalog_name": { + "description": "(Required, Immutable) The name of the catalog for the connector's staging storage location.", + "$ref": "#/$defs/string" + }, + "schema_name": { + "description": "(Required, Immutable) The name of the schema for the connector's staging storage location.", + "$ref": "#/$defs/string" + }, + "volume_name": { + "description": "(Optional) The Unity Catalog-compatible name for the storage location.\nThis is the volume to use for the data that is extracted by the connector.\nSpark Declarative Pipelines system will automatically create the volume under the catalog and schema.\nFor Combined Cdc Managed Ingestion pipelines default name for the volume would be :\n__databricks_ingestion_gateway_staging_data-$pipelineId", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "catalog_name", + "schema_name" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "pipelines.DayOfWeek": { "oneOf": [ { @@ -7901,6 +8003,18 @@ "description": "The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. 
This is used with\nboth connectors for applications like Salesforce, Workday, and so on, and also database connectors like Oracle,\n(connector_type = QUERY_BASED OR connector_type = CDC).\nIf connection name corresponds to database connectors like Oracle, and connector_type is not provided then\nconnector_type defaults to QUERY_BASED. If connector_type is passed as CDC we use Combined Cdc Managed Ingestion\npipeline.\nUnder certain conditions, this can be replaced with ingestion_gateway_id to change the connector to Cdc Managed\nIngestion Pipeline with Gateway pipeline.", "$ref": "#/$defs/string" }, + "connector_type": { + "description": "(Optional) Connector Type for sources. Ex: CDC, Query Based.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ConnectorType", + "x-databricks-preview": "PRIVATE", + "doNotSuggest": true + }, + "data_staging_options": { + "description": "(Optional) Location of staged data storage. This is required for migration from Cdc Managed Ingestion Pipeline\nwith Gateway pipeline to Combined Cdc Managed Ingestion Pipeline.\nIf not specified, the volume for staged data will be created in catalog and schema/target specified in the\ntop level pipeline definition.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DataStagingOptions", + "x-databricks-preview": "PRIVATE", + "doNotSuggest": true + }, "full_refresh_window": { "description": "(Optional) A window that specifies a set of time ranges for snapshot queries in CDC.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.OperationTimeWindow" @@ -11293,6 +11407,20 @@ } ] }, + "jobs.AlertTaskSubscriber": { + "oneOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AlertTaskSubscriber" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "jobs.JobCluster": { "oneOf": [ { diff --git 
a/bundle/schema/jsonschema_for_docs.json b/bundle/schema/jsonschema_for_docs.json index 2291b411d7..bcb6866296 100644 --- a/bundle/schema/jsonschema_for_docs.json +++ b/bundle/schema/jsonschema_for_docs.json @@ -2152,7 +2152,8 @@ }, "engine": { "description": "The deployment engine to use. Valid values are `terraform` and `direct`. Takes priority over `DATABRICKS_BUNDLE_ENGINE` environment variable. Default is \"terraform\".", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/engine.EngineType" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/engine.EngineType", + "x-since-version": "v0.295.0" }, "git": { "description": "The Git version control details that are associated with your bundle.", @@ -4096,7 +4097,7 @@ }, "compute.Environment": { "type": "object", - "description": "The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines.\nIn this minimal environment spec, only pip dependencies are supported.", + "description": "The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines.\nIn this minimal environment spec, only pip and java dependencies are supported.", "properties": { "base_environment": { "description": "The `base_environment` key refers to an `env.yaml` file that specifies an environment version and a collection of dependencies required for the environment setup.\nThis `env.yaml` file may itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, when used as a base environment, `env_1.yaml` (or further nested references) will not be processed or included in the final environment, meaning that the resolution of `base_environment` references is not recursive.", @@ -4116,7 +4117,7 @@ "x-since-version": "v0.229.0" }, "environment_version": { - "description": "Required. 
Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.", + "description": "Either `environment_version` or `base_environment` needs to be provided. Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.", "$ref": "#/$defs/string", "x-since-version": "v0.252.0" }, @@ -4743,9 +4744,46 @@ "CAN_QUERY", "CAN_MONITOR", "CAN_CREATE", - "CAN_MONITOR_ONLY" + "CAN_MONITOR_ONLY", + "CAN_CREATE_APP" ] }, + "jobs.AlertTask": { + "type": "object", + "properties": { + "alert_id": { + "description": "The alert_id is the canonical identifier of the alert.", + "$ref": "#/$defs/string" + }, + "subscribers": { + "description": "The subscribers receive alert evaluation result notifications after the alert task is completed.\nThe number of subscriptions is limited to 100.", + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.AlertTaskSubscriber" + }, + "warehouse_id": { + "description": "The warehouse_id identifies the warehouse settings used by the alert task.", + "$ref": "#/$defs/string" + }, + "workspace_path": { + "description": "The workspace_path is the path to the alert file in the workspace. 
The path:\n* must start with \"/Workspace\"\n* must be a normalized path.\nUser has to select only one of alert_id or workspace_path to identify the alert.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, + "jobs.AlertTaskSubscriber": { + "type": "object", + "description": "Represents a subscriber that will receive alert notifications.\nA subscriber can be either a user (via email) or a notification destination (via destination_id).", + "properties": { + "destination_id": { + "$ref": "#/$defs/string" + }, + "user_name": { + "description": "A valid workspace email address.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false + }, "jobs.AuthenticationMethod": { "type": "string", "enum": [ @@ -5169,8 +5207,6 @@ }, "sparse_checkout": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparseCheckout", - "x-databricks-preview": "PRIVATE", - "doNotSuggest": true, "x-since-version": "v0.290.0" } }, @@ -6121,6 +6157,10 @@ "jobs.Task": { "type": "object", "properties": { + "alert_task": { + "description": "New alert v2 task", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AlertTask" + }, "clean_rooms_notebook_task": { "description": "The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask", @@ -6569,6 +6609,14 @@ }, "additionalProperties": false }, + "pipelines.ConnectorType": { + "type": "string", + "description": "For certain database sources LakeFlow Connect offers both query based and cdc\ningestion, ConnectorType can bse used to convey the type of ingestion.\nIf connection_name is provided for database sources, we default to Query Based ingestion", + "enum": [ + "CDC", + "QUERY_BASED" + ] + }, "pipelines.CronTrigger": { "type": "object", "properties": { @@ -6583,6 +6631,29 @@ }, 
"additionalProperties": false }, + "pipelines.DataStagingOptions": { + "type": "object", + "description": "Location of staged data storage", + "properties": { + "catalog_name": { + "description": "(Required, Immutable) The name of the catalog for the connector's staging storage location.", + "$ref": "#/$defs/string" + }, + "schema_name": { + "description": "(Required, Immutable) The name of the schema for the connector's staging storage location.", + "$ref": "#/$defs/string" + }, + "volume_name": { + "description": "(Optional) The Unity Catalog-compatible name for the storage location.\nThis is the volume to use for the data that is extracted by the connector.\nSpark Declarative Pipelines system will automatically create the volume under the catalog and schema.\nFor Combined Cdc Managed Ingestion pipelines default name for the volume would be :\n__databricks_ingestion_gateway_staging_data-$pipelineId", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "catalog_name", + "schema_name" + ] + }, "pipelines.DayOfWeek": { "type": "string", "description": "Days of week in which the window is allowed to happen.\nIf not specified all days of the week will be used.", @@ -6726,6 +6797,18 @@ "$ref": "#/$defs/string", "x-since-version": "v0.229.0" }, + "connector_type": { + "description": "(Optional) Connector Type for sources. Ex: CDC, Query Based.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ConnectorType", + "x-databricks-preview": "PRIVATE", + "doNotSuggest": true + }, + "data_staging_options": { + "description": "(Optional) Location of staged data storage. 
This is required for migration from Cdc Managed Ingestion Pipeline\nwith Gateway pipeline to Combined Cdc Managed Ingestion Pipeline.\nIf not specified, the volume for staged data will be created in catalog and schema/target specified in the\ntop level pipeline definition.", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DataStagingOptions", + "x-databricks-preview": "PRIVATE", + "doNotSuggest": true + }, "full_refresh_window": { "description": "(Optional) A window that specifies a set of time ranges for snapshot queries in CDC.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.OperationTimeWindow", @@ -9148,6 +9231,12 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef" } }, + "jobs.AlertTaskSubscriber": { + "type": "array", + "items": { + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AlertTaskSubscriber" + } + }, "jobs.JobCluster": { "type": "array", "items": { diff --git a/cmd/labs/project/installer_test.go b/cmd/labs/project/installer_test.go index 9c19f95748..86752b7047 100644 --- a/cmd/labs/project/installer_test.go +++ b/cmd/labs/project/installer_test.go @@ -168,6 +168,10 @@ func TestInstallerWorksForReleases(t *testing.T) { }) }() server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/.well-known/databricks-config" { + w.WriteHeader(http.StatusNotFound) + return + } if r.URL.Path == "/databrickslabs/blueprint/v0.3.15/labs.yml" { raw, err := os.ReadFile("testdata/installed-in-home/.databricks/labs/blueprint/lib/labs.yml") assert.NoError(t, err) @@ -248,6 +252,10 @@ func TestOfflineInstallerWorksForReleases(t *testing.T) { // run databricks labs install --offline=true // it will look for the code in the same install directory and if present, install from there. 
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/.well-known/databricks-config" { + w.WriteHeader(http.StatusNotFound) + return + } if r.URL.Path == "/api/2.1/clusters/get" { respondWithJSON(t, w, &compute.ClusterDetails{ State: compute.StateRunning, @@ -291,6 +299,10 @@ func TestInstallerWorksForDevelopment(t *testing.T) { }() server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/.well-known/databricks-config" { + w.WriteHeader(http.StatusNotFound) + return + } if r.URL.Path == "/api/2.1/clusters/list" { respondWithJSON(t, w, compute.ListClustersResponse{ Clusters: []compute.ClusterDetails{ @@ -416,6 +428,10 @@ func TestUpgraderWorksForReleases(t *testing.T) { }) }() server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/.well-known/databricks-config" { + w.WriteHeader(http.StatusNotFound) + return + } if r.URL.Path == "/databrickslabs/blueprint/v0.4.0/labs.yml" { raw, err := os.ReadFile("testdata/installed-in-home/.databricks/labs/blueprint/lib/labs.yml") assert.NoError(t, err) diff --git a/cmd/root/auth.go b/cmd/root/auth.go index a47587267a..ec8fc17a87 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -56,16 +56,15 @@ func accountClientOrPrompt(ctx context.Context, cfg *config.Config, allowPrompt err = a.Config.Authenticate(emptyHttpRequest(ctx)) } - prompt := false - if allowPrompt && err != nil && cmdio.IsPromptSupported(ctx) { - // Prompt to select a profile if the current configuration is not an account client. - prompt = prompt || errors.Is(err, databricks.ErrNotAccountClient) - // Prompt to select a profile if the current configuration doesn't resolve to a credential provider. - prompt = prompt || errors.Is(err, config.ErrCannotConfigureDefault) - } - - if !prompt { - // If we are not prompting, we can return early. + // Determine if we should prompt for a profile. 
The SDK no longer returns + // ErrNotAccountClient from NewAccountClient (as of v0.125.0, host-type + // validation was removed in favor of host metadata resolution). Use + // ConfigType() to detect wrong host type. + needsPrompt := cfg.ConfigType() != config.AccountConfig || + cfg.AccountID == "" || + (err != nil && errors.Is(err, config.ErrCannotConfigureDefault)) + + if !needsPrompt || !allowPrompt || !cmdio.IsPromptSupported(ctx) { return a, err } @@ -158,16 +157,14 @@ func workspaceClientOrPrompt(ctx context.Context, cfg *config.Config, allowPromp err = w.Config.Authenticate(emptyHttpRequest(ctx)) } - prompt := false - if allowPrompt && err != nil && cmdio.IsPromptSupported(ctx) { - // Prompt to select a profile if the current configuration is not a workspace client. - prompt = prompt || errors.Is(err, databricks.ErrNotWorkspaceClient) - // Prompt to select a profile if the current configuration doesn't resolve to a credential provider. - prompt = prompt || errors.Is(err, config.ErrCannotConfigureDefault) - } + // Determine if we should prompt for a profile. The SDK no longer returns + // ErrNotWorkspaceClient from NewWorkspaceClient (as of v0.125.0, host-type + // validation was removed in favor of host metadata resolution). Use + // ConfigType() to detect wrong host type. + needsPrompt := cfg.ConfigType() != config.WorkspaceConfig || + (err != nil && errors.Is(err, config.ErrCannotConfigureDefault)) - if !prompt { - // If we are not prompting, we can return early. + if !needsPrompt || !allowPrompt || !cmdio.IsPromptSupported(ctx) { return w, err } diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go index 11534d44e0..dc6930b9fa 100644 --- a/cmd/root/auth_test.go +++ b/cmd/root/auth_test.go @@ -2,6 +2,7 @@ package root import ( "context" + "net/http" "os" "path/filepath" "testing" @@ -15,6 +16,12 @@ import ( "github.com/stretchr/testify/require" ) +// noNetworkTransport prevents real HTTP calls in auth tests. 
+// Returns 404 for host metadata lookups; 200 for everything else. +var noNetworkTransport = roundTripperFunc(func(r *http.Request) (*http.Response, error) { + return &http.Response{StatusCode: http.StatusNotFound, Body: http.NoBody}, nil +}) + func TestEmptyHttpRequest(t *testing.T) { ctx, cancel := context.WithCancel(t.Context()) defer cancel() @@ -32,7 +39,10 @@ var workspacePromptFn = func(ctx context.Context, cfg *config.Config, retry bool return workspaceClientOrPrompt(ctx, cfg, retry) } -func expectPrompts(t *testing.T, fn promptFn, config *config.Config) { +func expectPrompts(t *testing.T, fn promptFn, cfg *config.Config) { + // Prevent real HTTP calls during auth resolution. + cfg.HTTPTransport = noNetworkTransport + ctx, cancel := context.WithTimeout(t.Context(), 1*time.Second) defer cancel() @@ -43,7 +53,7 @@ func expectPrompts(t *testing.T, fn promptFn, config *config.Config) { go func() { defer close(errch) defer cancel() - _, err := fn(ctx, config, true) + _, err := fn(ctx, cfg, true) errch <- err }() @@ -57,12 +67,15 @@ func expectPrompts(t *testing.T, fn promptFn, config *config.Config) { } } -func expectReturns(t *testing.T, fn promptFn, config *config.Config) { +func expectReturns(t *testing.T, fn promptFn, cfg *config.Config) { + // Prevent real HTTP calls during auth resolution. 
+ cfg.HTTPTransport = noNetworkTransport + ctx, cancel := context.WithTimeout(t.Context(), 1*time.Second) defer cancel() ctx, _ = cmdio.SetupTest(ctx, cmdio.TestOptions{PromptSupported: true}) - client, err := fn(ctx, config, true) + client, err := fn(ctx, cfg, true) require.NoError(t, err) require.NotNil(t, client) } diff --git a/cmd/root/root.go b/cmd/root/root.go index f10768f21c..e29d6df83c 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -12,7 +12,6 @@ import ( "time" "github.com/databricks/cli/internal/build" - "github.com/databricks/cli/libs/agent" "github.com/databricks/cli/libs/auth" "github.com/databricks/cli/libs/cmdctx" "github.com/databricks/cli/libs/cmdio" @@ -79,7 +78,6 @@ func New(ctx context.Context) *cobra.Command { ctx = withCommandInUserAgent(ctx, cmd) ctx = withCommandExecIdInUserAgent(ctx) ctx = withUpstreamInUserAgent(ctx) - ctx = withAgentInUserAgent(ctx) ctx = withInteractiveModeInUserAgent(ctx) ctx = InjectTestPidToUserAgent(ctx) cmd.SetContext(ctx) @@ -139,9 +137,6 @@ Stack Trace: // Detect if the CLI is running on DBR and store this on the context. ctx = dbr.DetectRuntime(ctx) - // Detect if the CLI is running under an agent. - ctx = agent.Detect(ctx) - // Set a command execution ID value in the context ctx = cmdctx.GenerateExecId(ctx) diff --git a/cmd/root/user_agent_agent.go b/cmd/root/user_agent_agent.go deleted file mode 100644 index 739b05bdee..0000000000 --- a/cmd/root/user_agent_agent.go +++ /dev/null @@ -1,27 +0,0 @@ -// This file integrates agent detection with the user agent string. -// -// The actual detection logic is in libs/agent. This file simply retrieves -// the detected agent name from the context and adds it to the user agent. -// -// Example user agent strings: -// - With Claude Code: "cli/X.Y.Z ... agent/claude-code ..." -// - No agent: "cli/X.Y.Z ..." 
(no agent tag) -package root - -import ( - "context" - - "github.com/databricks/cli/libs/agent" - "github.com/databricks/databricks-sdk-go/useragent" -) - -// Key in the user agent -const agentKey = "agent" - -func withAgentInUserAgent(ctx context.Context) context.Context { - product := agent.Product(ctx) - if product == "" { - return ctx - } - return useragent.InContext(ctx, agentKey, product) -} diff --git a/cmd/root/user_agent_agent_test.go b/cmd/root/user_agent_agent_test.go index 0673839c35..46e7fc538e 100644 --- a/cmd/root/user_agent_agent_test.go +++ b/cmd/root/user_agent_agent_test.go @@ -1,37 +1,108 @@ package root import ( + "net/http" + "os" + "strings" "testing" - "github.com/databricks/cli/libs/agent" + "github.com/databricks/databricks-sdk-go/config" "github.com/databricks/databricks-sdk-go/useragent" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestAgentInUserAgent(t *testing.T) { - for _, product := range []string{ - agent.Antigravity, - agent.ClaudeCode, - agent.Cline, - agent.Codex, - agent.Cursor, - agent.GeminiCLI, - agent.OpenCode, - } { - t.Run(product, func(t *testing.T) { - ctx := t.Context() - ctx = agent.Mock(ctx, product) - - ctx = withAgentInUserAgent(ctx) - assert.Contains(t, useragent.FromContext(ctx), "agent/"+product) - }) +// All known agent env vars. Must be unset in tests to avoid interference +// from the host environment (e.g., running tests inside Claude Code). +var agentEnvVars = []string{ + "ANTIGRAVITY_AGENT", + "CLAUDECODE", + "CLINE_ACTIVE", + "CODEX_CI", + "COPILOT_CLI", + "CURSOR_AGENT", + "GEMINI_CLI", + "OPENCLAW_SHELL", + "OPENCODE", +} + +// unsetAgentEnv removes all known agent env vars from the environment. +// The SDK uses os.LookupEnv, so setting to empty is not enough; the vars +// must be fully unset. 
+func unsetAgentEnv(t *testing.T) { + t.Helper() + for _, v := range agentEnvVars { + original, exists := os.LookupEnv(v) + os.Unsetenv(v) + if exists { + t.Cleanup(func() { os.Setenv(v, original) }) + } + } +} + +// captureUserAgent makes an HTTP request through the SDK and returns the +// captured User-Agent header string. +func captureUserAgent(t *testing.T) string { + t.Helper() + + var capturedUA string + cfg := &config.Config{ + Host: "https://test.databricks.com", + Token: "test-token", + HTTPTransport: roundTripperFunc(func(r *http.Request) (*http.Response, error) { + capturedUA = r.Header.Get("User-Agent") + return &http.Response{StatusCode: 200, Body: http.NoBody}, nil + }), } + + client, err := cfg.NewApiClient() + require.NoError(t, err) + + _ = client.Do(t.Context(), "GET", "/api/2.0/clusters/list") + return capturedUA +} + +// TestSDKAgentDetection verifies the SDK adds agent/ to the User-Agent +// header when exactly one agent env var is set. +func TestSDKAgentDetection(t *testing.T) { + unsetAgentEnv(t) + useragent.ClearCache() + t.Cleanup(useragent.ClearCache) + + t.Setenv("CLAUDECODE", "1") + + ua := captureUserAgent(t) + assert.Contains(t, ua, "agent/claude-code") + assert.Equal(t, 1, strings.Count(ua, "agent/"), "expected exactly one agent/ segment") +} + +// TestSDKNoAgentDetected verifies no agent/ segment is added when no agent +// env vars are set. +func TestSDKNoAgentDetected(t *testing.T) { + unsetAgentEnv(t) + useragent.ClearCache() + t.Cleanup(useragent.ClearCache) + + ua := captureUserAgent(t) + assert.NotContains(t, ua, "agent/") +} + +// TestSDKMultipleAgentsSuppressed verifies no agent/ segment is added when +// multiple agent env vars are set (ambiguity guard). 
+func TestSDKMultipleAgentsSuppressed(t *testing.T) { + unsetAgentEnv(t) + useragent.ClearCache() + t.Cleanup(useragent.ClearCache) + + t.Setenv("CLAUDECODE", "1") + t.Setenv("CURSOR_AGENT", "1") + + ua := captureUserAgent(t) + assert.NotContains(t, ua, "agent/") } -func TestAgentNotSet(t *testing.T) { - ctx := t.Context() - ctx = agent.Mock(ctx, "") +type roundTripperFunc func(*http.Request) (*http.Response, error) - ctx = withAgentInUserAgent(ctx) - assert.NotContains(t, useragent.FromContext(ctx), "agent/") +func (f roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { + return f(r) } diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index 26ed4f5ce2..f8d5e67262 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -35,6 +35,7 @@ import ( data_sources "github.com/databricks/cli/cmd/workspace/data-sources" database "github.com/databricks/cli/cmd/workspace/database" entity_tag_assignments "github.com/databricks/cli/cmd/workspace/entity-tag-assignments" + environments "github.com/databricks/cli/cmd/workspace/environments" experiments "github.com/databricks/cli/cmd/workspace/experiments" external_lineage "github.com/databricks/cli/cmd/workspace/external-lineage" external_locations "github.com/databricks/cli/cmd/workspace/external-locations" @@ -162,6 +163,7 @@ func All() []*cobra.Command { out = append(out, data_sources.New()) out = append(out, database.New()) out = append(out, entity_tag_assignments.New()) + out = append(out, environments.New()) out = append(out, experiments.New()) out = append(out, external_lineage.New()) out = append(out, external_locations.New()) diff --git a/cmd/workspace/data-classification/data-classification.go b/cmd/workspace/data-classification/data-classification.go index 29056a9bc8..8e13587e2e 100755 --- a/cmd/workspace/data-classification/data-classification.go +++ b/cmd/workspace/data-classification/data-classification.go @@ -27,10 +27,7 @@ func New() *cobra.Command { tables. 
Each catalog can have at most one configuration resource that controls scanning behavior and auto-tagging rules.`, GroupID: "dataclassification", - - // This service is being previewed; hide from help output. - Hidden: true, - RunE: root.ReportUnknownSubcommand, + RunE: root.ReportUnknownSubcommand, } // Add methods diff --git a/cmd/workspace/environments/environments.go b/cmd/workspace/environments/environments.go new file mode 100755 index 0000000000..e227871d7a --- /dev/null +++ b/cmd/workspace/environments/environments.go @@ -0,0 +1,799 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package environments + +import ( + "fmt" + "strings" + "time" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdctx" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/common/types/fieldmask" + "github.com/databricks/databricks-sdk-go/experimental/api" + "github.com/databricks/databricks-sdk-go/service/environments" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "environments", + Short: `APIs to manage environment resources.`, + Long: `APIs to manage environment resources. 
+ + The Environments API provides management capabilities for different types of + environments including workspace-level base environments that define the + environment version and dependencies to be used in serverless notebooks and + jobs.`, + GroupID: "environments", + RunE: root.ReportUnknownSubcommand, + } + + // Add methods + cmd.AddCommand(newCreateWorkspaceBaseEnvironment()) + cmd.AddCommand(newDeleteWorkspaceBaseEnvironment()) + cmd.AddCommand(newGetDefaultWorkspaceBaseEnvironment()) + cmd.AddCommand(newGetOperation()) + cmd.AddCommand(newGetWorkspaceBaseEnvironment()) + cmd.AddCommand(newListWorkspaceBaseEnvironments()) + cmd.AddCommand(newRefreshWorkspaceBaseEnvironment()) + cmd.AddCommand(newUpdateDefaultWorkspaceBaseEnvironment()) + cmd.AddCommand(newUpdateWorkspaceBaseEnvironment()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create-workspace-base-environment command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createWorkspaceBaseEnvironmentOverrides []func( + *cobra.Command, + *environments.CreateWorkspaceBaseEnvironmentRequest, +) + +func newCreateWorkspaceBaseEnvironment() *cobra.Command { + cmd := &cobra.Command{} + + var createWorkspaceBaseEnvironmentReq environments.CreateWorkspaceBaseEnvironmentRequest + createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment = environments.WorkspaceBaseEnvironment{} + var createWorkspaceBaseEnvironmentJson flags.JsonFlag + + var createWorkspaceBaseEnvironmentSkipWait bool + var createWorkspaceBaseEnvironmentTimeout time.Duration + + cmd.Flags().BoolVar(&createWorkspaceBaseEnvironmentSkipWait, "no-wait", createWorkspaceBaseEnvironmentSkipWait, `do not wait to reach DONE state`) + cmd.Flags().DurationVar(&createWorkspaceBaseEnvironmentTimeout, "timeout", 0, `maximum amount of time to reach DONE state`) + + cmd.Flags().Var(&createWorkspaceBaseEnvironmentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createWorkspaceBaseEnvironmentReq.RequestId, "request-id", createWorkspaceBaseEnvironmentReq.RequestId, `A unique identifier for this request.`) + cmd.Flags().StringVar(&createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironmentId, "workspace-base-environment-id", createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironmentId, `The ID to use for the workspace base environment, which will become the final component of the resource name.`) + cmd.Flags().Var(&createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.BaseEnvironmentType, "base-environment-type", `The type of base environment (CPU or GPU). 
Supported values: [CPU, GPU]`) + cmd.Flags().StringVar(&createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Filepath, "filepath", createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Filepath, `The WSFS or UC Volumes path to the environment YAML file.`) + cmd.Flags().StringVar(&createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Name, "name", createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Name, `The resource name of the workspace base environment.`) + + cmd.Use = "create-workspace-base-environment DISPLAY_NAME" + cmd.Short = `Create a workspace base environment.` + cmd.Long = `Create a workspace base environment. + + Creates a new WorkspaceBaseEnvironment. This is a long-running operation. The + operation will asynchronously generate a materialized environment to optimize + dependency resolution and is only marked as done when the materialized + environment has been successfully generated or has failed. + + This is a long-running operation. By default, the command waits for the + operation to complete. Use --no-wait to return immediately with the raw + operation details. The operation's 'name' field can then be used to poll for + completion using the get-operation command. + + Arguments: + DISPLAY_NAME: Human-readable display name for the workspace base environment.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. 
Provide 'display_name' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createWorkspaceBaseEnvironmentJson.Unmarshal(&createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnostics(ctx, diags) + if err != nil { + return err + } + } + } + if !cmd.Flags().Changed("json") { + createWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.DisplayName = args[0] + } + + // Determine which mode to execute based on flags. + switch { + case createWorkspaceBaseEnvironmentSkipWait: + wait, err := w.Environments.CreateWorkspaceBaseEnvironment(ctx, createWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + + // Return operation immediately without waiting. + operation, err := w.Environments.GetOperation(ctx, environments.GetOperationRequest{ + Name: wait.Name(), + }) + if err != nil { + return err + } + return cmdio.Render(ctx, operation) + + default: + wait, err := w.Environments.CreateWorkspaceBaseEnvironment(ctx, createWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + + // Show spinner while waiting for completion. + sp := cmdio.NewSpinner(ctx) + sp.Update("Waiting for create-workspace-base-environment to complete...") + + // Wait for completion. + opts := api.WithTimeout(createWorkspaceBaseEnvironmentTimeout) + response, err := wait.Wait(ctx, opts) + if err != nil { + return err + } + sp.Close() + return cmdio.Render(ctx, response) + } + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createWorkspaceBaseEnvironmentOverrides { + fn(cmd, &createWorkspaceBaseEnvironmentReq) + } + + return cmd +} + +// start delete-workspace-base-environment command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteWorkspaceBaseEnvironmentOverrides []func( + *cobra.Command, + *environments.DeleteWorkspaceBaseEnvironmentRequest, +) + +func newDeleteWorkspaceBaseEnvironment() *cobra.Command { + cmd := &cobra.Command{} + + var deleteWorkspaceBaseEnvironmentReq environments.DeleteWorkspaceBaseEnvironmentRequest + + cmd.Use = "delete-workspace-base-environment NAME" + cmd.Short = `Delete a workspace base environment.` + cmd.Long = `Delete a workspace base environment. + + Deletes a WorkspaceBaseEnvironment. Deleting a base environment may impact + linked notebooks and jobs. This operation is irreversible and should be + performed only when you are certain the environment is no longer needed. + + Arguments: + NAME: Required. The resource name of the workspace base environment to delete. + Format: workspace-base-environments/{workspace_base_environment}` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + deleteWorkspaceBaseEnvironmentReq.Name = args[0] + + err = w.Environments.DeleteWorkspaceBaseEnvironment(ctx, deleteWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteWorkspaceBaseEnvironmentOverrides { + fn(cmd, &deleteWorkspaceBaseEnvironmentReq) + } + + return cmd +} + +// start get-default-workspace-base-environment command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getDefaultWorkspaceBaseEnvironmentOverrides []func( + *cobra.Command, + *environments.GetDefaultWorkspaceBaseEnvironmentRequest, +) + +func newGetDefaultWorkspaceBaseEnvironment() *cobra.Command { + cmd := &cobra.Command{} + + var getDefaultWorkspaceBaseEnvironmentReq environments.GetDefaultWorkspaceBaseEnvironmentRequest + + cmd.Use = "get-default-workspace-base-environment NAME" + cmd.Short = `Get the default workspace base environment configuration.` + cmd.Long = `Get the default workspace base environment configuration. + + Gets the default WorkspaceBaseEnvironment configuration for the workspace. + Returns the current default base environment settings for both CPU and GPU + compute. + + Arguments: + NAME: A static resource name of the default workspace base environment. Format: + default-workspace-base-environment` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getDefaultWorkspaceBaseEnvironmentReq.Name = args[0] + + response, err := w.Environments.GetDefaultWorkspaceBaseEnvironment(ctx, getDefaultWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getDefaultWorkspaceBaseEnvironmentOverrides { + fn(cmd, &getDefaultWorkspaceBaseEnvironmentReq) + } + + return cmd +} + +// start get-operation command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOperationOverrides []func( + *cobra.Command, + *environments.GetOperationRequest, +) + +func newGetOperation() *cobra.Command { + cmd := &cobra.Command{} + + var getOperationReq environments.GetOperationRequest + + cmd.Use = "get-operation NAME" + cmd.Short = `Get the status of a long-running operation.` + cmd.Long = `Get the status of a long-running operation. + + Gets the status of a long-running operation. Clients can use this method to + poll the operation result. + + Arguments: + NAME: The name of the operation resource.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getOperationReq.Name = args[0] + + response, err := w.Environments.GetOperation(ctx, getOperationReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOperationOverrides { + fn(cmd, &getOperationReq) + } + + return cmd +} + +// start get-workspace-base-environment command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getWorkspaceBaseEnvironmentOverrides []func( + *cobra.Command, + *environments.GetWorkspaceBaseEnvironmentRequest, +) + +func newGetWorkspaceBaseEnvironment() *cobra.Command { + cmd := &cobra.Command{} + + var getWorkspaceBaseEnvironmentReq environments.GetWorkspaceBaseEnvironmentRequest + + cmd.Use = "get-workspace-base-environment NAME" + cmd.Short = `Get a workspace base environment.` + cmd.Long = `Get a workspace base environment. + + Retrieves a WorkspaceBaseEnvironment by its name. + + Arguments: + NAME: Required. The resource name of the workspace base environment to retrieve. + Format: workspace-base-environments/{workspace_base_environment}` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + getWorkspaceBaseEnvironmentReq.Name = args[0] + + response, err := w.Environments.GetWorkspaceBaseEnvironment(ctx, getWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getWorkspaceBaseEnvironmentOverrides { + fn(cmd, &getWorkspaceBaseEnvironmentReq) + } + + return cmd +} + +// start list-workspace-base-environments command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listWorkspaceBaseEnvironmentsOverrides []func( + *cobra.Command, + *environments.ListWorkspaceBaseEnvironmentsRequest, +) + +func newListWorkspaceBaseEnvironments() *cobra.Command { + cmd := &cobra.Command{} + + var listWorkspaceBaseEnvironmentsReq environments.ListWorkspaceBaseEnvironmentsRequest + + cmd.Flags().IntVar(&listWorkspaceBaseEnvironmentsReq.PageSize, "page-size", listWorkspaceBaseEnvironmentsReq.PageSize, `The maximum number of environments to return per page.`) + cmd.Flags().StringVar(&listWorkspaceBaseEnvironmentsReq.PageToken, "page-token", listWorkspaceBaseEnvironmentsReq.PageToken, `Page token for pagination.`) + + cmd.Use = "list-workspace-base-environments" + cmd.Short = `List workspace base environments.` + cmd.Long = `List workspace base environments. + + Lists all WorkspaceBaseEnvironments in the workspace.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + response := w.Environments.ListWorkspaceBaseEnvironments(ctx, listWorkspaceBaseEnvironmentsReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listWorkspaceBaseEnvironmentsOverrides { + fn(cmd, &listWorkspaceBaseEnvironmentsReq) + } + + return cmd +} + +// start refresh-workspace-base-environment command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var refreshWorkspaceBaseEnvironmentOverrides []func(
+	*cobra.Command,
+	*environments.RefreshWorkspaceBaseEnvironmentRequest,
+)
+
+func newRefreshWorkspaceBaseEnvironment() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var refreshWorkspaceBaseEnvironmentReq environments.RefreshWorkspaceBaseEnvironmentRequest
+
+	var refreshWorkspaceBaseEnvironmentSkipWait bool
+	var refreshWorkspaceBaseEnvironmentTimeout time.Duration
+
+	cmd.Flags().BoolVar(&refreshWorkspaceBaseEnvironmentSkipWait, "no-wait", refreshWorkspaceBaseEnvironmentSkipWait, `do not wait to reach DONE state`)
+	cmd.Flags().DurationVar(&refreshWorkspaceBaseEnvironmentTimeout, "timeout", 0, `maximum amount of time to reach DONE state`)
+
+	cmd.Use = "refresh-workspace-base-environment NAME"
+	cmd.Short = `Refresh materialized workspace base environment.`
+	cmd.Long = `Refresh materialized workspace base environment.
+
+  Refreshes the materialized environment for a WorkspaceBaseEnvironment. This is
+  a long-running operation. The operation will asynchronously regenerate the
+  materialized environment and is only marked as done when the materialized
+  environment has been successfully generated or has failed. The existing
+  materialized environment remains available until it expires.
+
+  This is a long-running operation. By default, the command waits for the
+  operation to complete. Use --no-wait to return immediately with the raw
+  operation details. The operation's 'name' field can then be used to poll for
+  completion using the get-operation command.
+
+  Arguments:
+    NAME: Required. The resource name of the workspace base environment to refresh.
+ Format: workspace-base-environments/{workspace_base_environment}` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + refreshWorkspaceBaseEnvironmentReq.Name = args[0] + + // Determine which mode to execute based on flags. + switch { + case refreshWorkspaceBaseEnvironmentSkipWait: + wait, err := w.Environments.RefreshWorkspaceBaseEnvironment(ctx, refreshWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + + // Return operation immediately without waiting. + operation, err := w.Environments.GetOperation(ctx, environments.GetOperationRequest{ + Name: wait.Name(), + }) + if err != nil { + return err + } + return cmdio.Render(ctx, operation) + + default: + wait, err := w.Environments.RefreshWorkspaceBaseEnvironment(ctx, refreshWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + + // Show spinner while waiting for completion. + sp := cmdio.NewSpinner(ctx) + sp.Update("Waiting for refresh-workspace-base-environment to complete...") + + // Wait for completion. + opts := api.WithTimeout(refreshWorkspaceBaseEnvironmentTimeout) + response, err := wait.Wait(ctx, opts) + if err != nil { + return err + } + sp.Close() + return cmdio.Render(ctx, response) + } + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range refreshWorkspaceBaseEnvironmentOverrides { + fn(cmd, &refreshWorkspaceBaseEnvironmentReq) + } + + return cmd +} + +// start update-default-workspace-base-environment command + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updateDefaultWorkspaceBaseEnvironmentOverrides []func( + *cobra.Command, + *environments.UpdateDefaultWorkspaceBaseEnvironmentRequest, +) + +func newUpdateDefaultWorkspaceBaseEnvironment() *cobra.Command { + cmd := &cobra.Command{} + + var updateDefaultWorkspaceBaseEnvironmentReq environments.UpdateDefaultWorkspaceBaseEnvironmentRequest + updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment = environments.DefaultWorkspaceBaseEnvironment{} + var updateDefaultWorkspaceBaseEnvironmentJson flags.JsonFlag + + cmd.Flags().Var(&updateDefaultWorkspaceBaseEnvironmentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment.CpuWorkspaceBaseEnvironment, "cpu-workspace-base-environment", updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment.CpuWorkspaceBaseEnvironment, `The default workspace base environment for CPU compute.`) + cmd.Flags().StringVar(&updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment.GpuWorkspaceBaseEnvironment, "gpu-workspace-base-environment", updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment.GpuWorkspaceBaseEnvironment, `The default workspace base environment for GPU compute.`) + cmd.Flags().StringVar(&updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment.Name, "name", updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment.Name, `The resource name of this singleton resource.`) + + cmd.Use = "update-default-workspace-base-environment NAME UPDATE_MASK" + cmd.Short = `Update the default workspace base environment configuration.` + cmd.Long = `Update the default workspace base environment configuration. + + Updates the default WorkspaceBaseEnvironment configuration for the workspace. 
+ Sets the specified base environments as the workspace defaults for CPU and/or + GPU compute. + + Arguments: + NAME: The resource name of this singleton resource. Format: + default-workspace-base-environment + UPDATE_MASK: Field mask specifying which fields to update. Use comma as the separator + for multiple fields (no space). The special value '*' indicates that all + fields should be updated (full replacement). Valid field paths: + cpu_workspace_base_environment, gpu_workspace_base_environment + + To unset one or both defaults, include the field path(s) in the mask and + omit them from the request body. To unset both, you must list both paths + explicitly — the wildcard '*' cannot be used to unset fields.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateDefaultWorkspaceBaseEnvironmentJson.Unmarshal(&updateDefaultWorkspaceBaseEnvironmentReq.DefaultWorkspaceBaseEnvironment) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnostics(ctx, diags) + if err != nil { + return err + } + } + } + updateDefaultWorkspaceBaseEnvironmentReq.Name = args[0] + if args[1] != "" { + updateMaskArray := strings.Split(args[1], ",") + updateDefaultWorkspaceBaseEnvironmentReq.UpdateMask = *fieldmask.New(updateMaskArray) + } + + response, err := w.Environments.UpdateDefaultWorkspaceBaseEnvironment(ctx, updateDefaultWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateDefaultWorkspaceBaseEnvironmentOverrides { + fn(cmd, &updateDefaultWorkspaceBaseEnvironmentReq) + } + + return cmd +} + +// start update-workspace-base-environment command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateWorkspaceBaseEnvironmentOverrides []func( + *cobra.Command, + *environments.UpdateWorkspaceBaseEnvironmentRequest, +) + +func newUpdateWorkspaceBaseEnvironment() *cobra.Command { + cmd := &cobra.Command{} + + var updateWorkspaceBaseEnvironmentReq environments.UpdateWorkspaceBaseEnvironmentRequest + updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment = environments.WorkspaceBaseEnvironment{} + var updateWorkspaceBaseEnvironmentJson flags.JsonFlag + + var updateWorkspaceBaseEnvironmentSkipWait bool + var updateWorkspaceBaseEnvironmentTimeout time.Duration + + cmd.Flags().BoolVar(&updateWorkspaceBaseEnvironmentSkipWait, "no-wait", updateWorkspaceBaseEnvironmentSkipWait, `do not wait to reach DONE state`) + cmd.Flags().DurationVar(&updateWorkspaceBaseEnvironmentTimeout, "timeout", 0, `maximum amount of time to reach DONE state`) + + cmd.Flags().Var(&updateWorkspaceBaseEnvironmentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().Var(&updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.BaseEnvironmentType, "base-environment-type", `The type of base environment (CPU or GPU). 
Supported values: [CPU, GPU]`) + cmd.Flags().StringVar(&updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Filepath, "filepath", updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Filepath, `The WSFS or UC Volumes path to the environment YAML file.`) + cmd.Flags().StringVar(&updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Name, "name", updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.Name, `The resource name of the workspace base environment.`) + + cmd.Use = "update-workspace-base-environment NAME DISPLAY_NAME" + cmd.Short = `Update a workspace base environment.` + cmd.Long = `Update a workspace base environment. + + Updates an existing WorkspaceBaseEnvironment. This is a long-running + operation. The operation will asynchronously regenerate the materialized + environment and is only marked as done when the materialized environment has + been successfully generated or has failed. The existing materialized + environment remains available until it expires. + + This is a long-running operation. By default, the command waits for the + operation to complete. Use --no-wait to return immediately with the raw + operation details. The operation's 'name' field can then be used to poll for + completion using the get-operation command. + + Arguments: + NAME: + DISPLAY_NAME: Human-readable display name for the workspace base environment.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(1)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, provide only NAME as positional arguments. 
Provide 'display_name' in your JSON input") + } + return nil + } + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateWorkspaceBaseEnvironmentJson.Unmarshal(&updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnostics(ctx, diags) + if err != nil { + return err + } + } + } + updateWorkspaceBaseEnvironmentReq.Name = args[0] + if !cmd.Flags().Changed("json") { + updateWorkspaceBaseEnvironmentReq.WorkspaceBaseEnvironment.DisplayName = args[1] + } + + // Determine which mode to execute based on flags. + switch { + case updateWorkspaceBaseEnvironmentSkipWait: + wait, err := w.Environments.UpdateWorkspaceBaseEnvironment(ctx, updateWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + + // Return operation immediately without waiting. + operation, err := w.Environments.GetOperation(ctx, environments.GetOperationRequest{ + Name: wait.Name(), + }) + if err != nil { + return err + } + return cmdio.Render(ctx, operation) + + default: + wait, err := w.Environments.UpdateWorkspaceBaseEnvironment(ctx, updateWorkspaceBaseEnvironmentReq) + if err != nil { + return err + } + + // Show spinner while waiting for completion. + sp := cmdio.NewSpinner(ctx) + sp.Update("Waiting for update-workspace-base-environment to complete...") + + // Wait for completion. + opts := api.WithTimeout(updateWorkspaceBaseEnvironmentTimeout) + response, err := wait.Wait(ctx, opts) + if err != nil { + return err + } + sp.Close() + return cmdio.Render(ctx, response) + } + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateWorkspaceBaseEnvironmentOverrides { + fn(cmd, &updateWorkspaceBaseEnvironmentReq) + } + + return cmd +} + +// end service Environments diff --git a/cmd/workspace/feature-engineering/feature-engineering.go b/cmd/workspace/feature-engineering/feature-engineering.go index c0dc7e939a..5d1e537507 100755 --- a/cmd/workspace/feature-engineering/feature-engineering.go +++ b/cmd/workspace/feature-engineering/feature-engineering.go @@ -75,11 +75,14 @@ func newCreateFeature() *cobra.Command { cmd.Flags().Var(&createFeatureJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&createFeatureReq.Feature.Description, "description", createFeatureReq.Feature.Description, `The description of the feature.`) + // TODO: array: entities cmd.Flags().StringVar(&createFeatureReq.Feature.FilterCondition, "filter-condition", createFeatureReq.Feature.FilterCondition, `Deprecated: Use DeltaTableSource.filter_condition or KafkaSource.filter_condition instead.`) + // TODO: array: inputs // TODO: complex arg: lineage_context // TODO: complex arg: time_window + // TODO: complex arg: timeseries_column - cmd.Use = "create-feature FULL_NAME SOURCE INPUTS FUNCTION" + cmd.Use = "create-feature FULL_NAME SOURCE FUNCTION" cmd.Short = `Create a feature.` cmd.Long = `Create a feature. @@ -88,8 +91,6 @@ func newCreateFeature() *cobra.Command { Arguments: FULL_NAME: The full three-part name (catalog, schema, name) of the feature. SOURCE: The data source of the feature. - INPUTS: Deprecated: Use AggregationFunction.inputs instead. Kept for backwards - compatibility. The input columns from which the feature is computed. 
FUNCTION: The function by which the feature is computed.` cmd.Annotations = make(map[string]string) @@ -98,11 +99,11 @@ func newCreateFeature() *cobra.Command { if cmd.Flags().Changed("json") { err := root.ExactArgs(0)(cmd, args) if err != nil { - return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'full_name', 'source', 'inputs', 'function' in your JSON input") + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'full_name', 'source', 'function' in your JSON input") } return nil } - check := root.ExactArgs(4) + check := root.ExactArgs(3) return check(cmd, args) } @@ -134,16 +135,9 @@ func newCreateFeature() *cobra.Command { } if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[2], &createFeatureReq.Feature.Inputs) - if err != nil { - return fmt.Errorf("invalid INPUTS: %s", args[2]) - } - - } - if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[3], &createFeatureReq.Feature.Function) + _, err = fmt.Sscan(args[2], &createFeatureReq.Feature.Function) if err != nil { - return fmt.Errorf("invalid FUNCTION: %s", args[3]) + return fmt.Errorf("invalid FUNCTION: %s", args[2]) } } @@ -871,11 +865,14 @@ func newUpdateFeature() *cobra.Command { cmd.Flags().Var(&updateFeatureJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&updateFeatureReq.Feature.Description, "description", updateFeatureReq.Feature.Description, `The description of the feature.`) + // TODO: array: entities cmd.Flags().StringVar(&updateFeatureReq.Feature.FilterCondition, "filter-condition", updateFeatureReq.Feature.FilterCondition, `Deprecated: Use DeltaTableSource.filter_condition or KafkaSource.filter_condition instead.`) + // TODO: array: inputs // TODO: complex arg: lineage_context // TODO: complex arg: time_window + // TODO: complex arg: timeseries_column - cmd.Use = "update-feature FULL_NAME UPDATE_MASK SOURCE INPUTS FUNCTION" + cmd.Use = 
"update-feature FULL_NAME UPDATE_MASK SOURCE FUNCTION" cmd.Short = `Update a feature's description (all other fields are immutable).` cmd.Long = `Update a feature's description (all other fields are immutable). @@ -885,8 +882,6 @@ func newUpdateFeature() *cobra.Command { FULL_NAME: The full three-part name (catalog, schema, name) of the feature. UPDATE_MASK: The list of fields to update. SOURCE: The data source of the feature. - INPUTS: Deprecated: Use AggregationFunction.inputs instead. Kept for backwards - compatibility. The input columns from which the feature is computed. FUNCTION: The function by which the feature is computed.` cmd.Annotations = make(map[string]string) @@ -895,11 +890,11 @@ func newUpdateFeature() *cobra.Command { if cmd.Flags().Changed("json") { err := root.ExactArgs(2)(cmd, args) if err != nil { - return fmt.Errorf("when --json flag is specified, provide only FULL_NAME, UPDATE_MASK as positional arguments. Provide 'full_name', 'source', 'inputs', 'function' in your JSON input") + return fmt.Errorf("when --json flag is specified, provide only FULL_NAME, UPDATE_MASK as positional arguments. 
Provide 'full_name', 'source', 'function' in your JSON input") } return nil } - check := root.ExactArgs(5) + check := root.ExactArgs(4) return check(cmd, args) } @@ -930,16 +925,9 @@ func newUpdateFeature() *cobra.Command { } if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[3], &updateFeatureReq.Feature.Inputs) - if err != nil { - return fmt.Errorf("invalid INPUTS: %s", args[3]) - } - - } - if !cmd.Flags().Changed("json") { - _, err = fmt.Sscan(args[4], &updateFeatureReq.Feature.Function) + _, err = fmt.Sscan(args[3], &updateFeatureReq.Feature.Function) if err != nil { - return fmt.Errorf("invalid FUNCTION: %s", args[4]) + return fmt.Errorf("invalid FUNCTION: %s", args[3]) } } diff --git a/cmd/workspace/genie/genie.go b/cmd/workspace/genie/genie.go index 07975cbd7d..e6fdb48004 100755 --- a/cmd/workspace/genie/genie.go +++ b/cmd/workspace/genie/genie.go @@ -621,9 +621,6 @@ func newGenieCreateEvalRun() *cobra.Command { SPACE_ID: The ID associated with the Genie space where the evaluations will be executed.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -695,9 +692,6 @@ func newGenieGetEvalResultDetails() *cobra.Command { EVAL_RUN_ID: The unique identifier for the evaluation run. RESULT_ID: The unique identifier for the evaluation result.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -758,9 +752,6 @@ func newGenieGetEvalRun() *cobra.Command { located. EVAL_RUN_ID: ` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -823,9 +814,6 @@ func newGenieListEvalResults() *cobra.Command { located. 
EVAL_RUN_ID: The unique identifier for the evaluation run.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -887,9 +875,6 @@ func newGenieListEvalRuns() *cobra.Command { SPACE_ID: The ID associated with the Genie space where the evaluation run is located.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/workspace/groups.go b/cmd/workspace/groups.go index 27dca16729..8dd096ee63 100644 --- a/cmd/workspace/groups.go +++ b/cmd/workspace/groups.go @@ -104,5 +104,9 @@ func Groups() []cobra.Group { ID: "postgres", Title: "Postgres", }, + { + ID: "environments", + Title: "Environments", + }, } } diff --git a/cmd/workspace/postgres/postgres.go b/cmd/workspace/postgres/postgres.go index a8a2e6904d..f098764d03 100755 --- a/cmd/workspace/postgres/postgres.go +++ b/cmd/workspace/postgres/postgres.go @@ -68,6 +68,7 @@ func New() *cobra.Command { cmd.AddCommand(newUpdateDatabase()) cmd.AddCommand(newUpdateEndpoint()) cmd.AddCommand(newUpdateProject()) + cmd.AddCommand(newUpdateRole()) // Apply optional overrides to this command. for _, fn := range cmdOverrides { @@ -606,8 +607,8 @@ func newCreateRole() *cobra.Command { // TODO: complex arg: status cmd.Use = "create-role PARENT" - cmd.Short = `Create a postgres role for a branch.` - cmd.Long = `Create a postgres role for a branch. + cmd.Short = `Create a Postgres Role for a Branch.` + cmd.Long = `Create a Postgres Role for a Branch. Creates a new Postgres role in the branch. @@ -620,9 +621,6 @@ func newCreateRole() *cobra.Command { PARENT: The Branch where this Role is created. Format: projects/{project_id}/branches/{branch_id}` - // This command is being previewed; hide from help output. 
- cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -1123,8 +1121,8 @@ func newDeleteRole() *cobra.Command { cmd.Flags().StringVar(&deleteRoleReq.ReassignOwnedTo, "reassign-owned-to", deleteRoleReq.ReassignOwnedTo, `Reassign objects.`) cmd.Use = "delete-role NAME" - cmd.Short = `Delete a postgres role in a branch.` - cmd.Long = `Delete a postgres role in a branch. + cmd.Short = `Delete a Postgres Role from a Branch.` + cmd.Long = `Delete a Postgres Role from a Branch. Deletes the specified Postgres role. @@ -1137,9 +1135,6 @@ func newDeleteRole() *cobra.Command { NAME: The full resource path of the role to delete. Format: projects/{project_id}/branches/{branch_id}/roles/{role_id}` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -1588,8 +1583,8 @@ func newGetRole() *cobra.Command { var getRoleReq postgres.GetRoleRequest cmd.Use = "get-role NAME" - cmd.Short = `Get a postgres role in a branch.` - cmd.Long = `Get a postgres role in a branch. + cmd.Short = `Get a Postgres Role for a Branch.` + cmd.Long = `Get a Postgres Role for a Branch. Retrieves information about the specified Postgres role, including its authentication method and permissions. @@ -1598,9 +1593,6 @@ func newGetRole() *cobra.Command { NAME: The full resource path of the role to retrieve. Format: projects/{project_id}/branches/{branch_id}/roles/{role_id}` - // This command is being previewed; hide from help output. 
- cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -1878,8 +1870,8 @@ func newListRoles() *cobra.Command { cmd.Flags().StringVar(&listRolesReq.PageToken, "page-token", listRolesReq.PageToken, `Page token from a previous response.`) cmd.Use = "list-roles PARENT" - cmd.Short = `List postgres roles in a branch.` - cmd.Long = `List postgres roles in a branch. + cmd.Short = `List Postgres Roles for a Branch.` + cmd.Long = `List Postgres Roles for a Branch. Returns a paginated list of Postgres roles in the branch. @@ -1887,9 +1879,6 @@ func newListRoles() *cobra.Command { PARENT: The Branch that owns this collection of roles. Format: projects/{project_id}/branches/{branch_id}` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -2424,4 +2413,129 @@ func newUpdateProject() *cobra.Command { return cmd } +// start update-role command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateRoleOverrides []func( + *cobra.Command, + *postgres.UpdateRoleRequest, +) + +func newUpdateRole() *cobra.Command { + cmd := &cobra.Command{} + + var updateRoleReq postgres.UpdateRoleRequest + updateRoleReq.Role = postgres.Role{} + var updateRoleJson flags.JsonFlag + + var updateRoleSkipWait bool + var updateRoleTimeout time.Duration + + cmd.Flags().BoolVar(&updateRoleSkipWait, "no-wait", updateRoleSkipWait, `do not wait to reach DONE state`) + cmd.Flags().DurationVar(&updateRoleTimeout, "timeout", 0, `maximum amount of time to reach DONE state`) + + cmd.Flags().Var(&updateRoleJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&updateRoleReq.Role.Name, "name", updateRoleReq.Role.Name, `Output only.`) + // TODO: complex arg: spec + // TODO: complex arg: status + + cmd.Use = "update-role NAME UPDATE_MASK" + cmd.Short = `Update a Postgres Role for a Branch.` + cmd.Long = `Update a Postgres Role for a Branch. + + Update a role for a branch. + + This is a long-running operation. By default, the command waits for the + operation to complete. Use --no-wait to return immediately with the raw + operation details. The operation's 'name' field can then be used to poll for + completion using the get-operation command. + + Arguments: + NAME: Output only. The full resource path of the role. Format: + projects/{project_id}/branches/{branch_id}/roles/{role_id} + UPDATE_MASK: The list of fields to update in Postgres Role. 
If unspecified, all fields + will be updated when possible.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := cmdctx.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateRoleJson.Unmarshal(&updateRoleReq.Role) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnostics(ctx, diags) + if err != nil { + return err + } + } + } + updateRoleReq.Name = args[0] + if args[1] != "" { + updateMaskArray := strings.Split(args[1], ",") + updateRoleReq.UpdateMask = *fieldmask.New(updateMaskArray) + } + + // Determine which mode to execute based on flags. + switch { + case updateRoleSkipWait: + wait, err := w.Postgres.UpdateRole(ctx, updateRoleReq) + if err != nil { + return err + } + + // Return operation immediately without waiting. + operation, err := w.Postgres.GetOperation(ctx, postgres.GetOperationRequest{ + Name: wait.Name(), + }) + if err != nil { + return err + } + return cmdio.Render(ctx, operation) + + default: + wait, err := w.Postgres.UpdateRole(ctx, updateRoleReq) + if err != nil { + return err + } + + // Show spinner while waiting for completion. + sp := cmdio.NewSpinner(ctx) + sp.Update("Waiting for update-role to complete...") + + // Wait for completion. + opts := api.WithTimeout(updateRoleTimeout) + response, err := wait.Wait(ctx, opts) + if err != nil { + return err + } + sp.Close() + return cmdio.Render(ctx, response) + } + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateRoleOverrides { + fn(cmd, &updateRoleReq) + } + + return cmd +} + // end service Postgres diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go index a25c6df59b..84313eed21 100755 --- a/cmd/workspace/serving-endpoints/serving-endpoints.go +++ b/cmd/workspace/serving-endpoints/serving-endpoints.go @@ -680,6 +680,7 @@ func newHttpRequest() *cobra.Command { cmd.Flags().StringVar(&httpRequestReq.Headers, "headers", httpRequestReq.Headers, `Additional headers for the request.`) cmd.Flags().StringVar(&httpRequestReq.Json, "json", httpRequestReq.Json, `The JSON payload to send in the request body.`) cmd.Flags().StringVar(&httpRequestReq.Params, "params", httpRequestReq.Params, `Query parameters for the request.`) + cmd.Flags().StringVar(&httpRequestReq.SubDomain, "sub-domain", httpRequestReq.SubDomain, `Optional subdomain to prepend to the connection URL's host.`) cmd.Use = "http-request CONNECTION_NAME METHOD PATH" cmd.Short = `Make external services call using the credentials stored in UC Connection.` diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index 236e72b3f8..8003dfd2cf 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -859,7 +859,7 @@ func newList() *cobra.Command { cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `The max number of warehouses to return.`) cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A page token, received from a previous ListWarehouses call.`) - cmd.Flags().IntVar(&listReq.RunAsUserId, "run-as-user-id", listReq.RunAsUserId, `Service Principal which will be used to fetch the list of endpoints.`) + cmd.Flags().IntVar(&listReq.RunAsUserId, "run-as-user-id", listReq.RunAsUserId, `Deprecated: this field is ignored by the server.`) cmd.Use = "list" cmd.Short = `List warehouses.` @@ -1054,7 +1054,7 @@ func 
newSetWorkspaceWarehouseConfig() *cobra.Command { // TODO: complex arg: channel // TODO: complex arg: config_param // TODO: array: data_access_config - cmd.Flags().BoolVar(&setWorkspaceWarehouseConfigReq.EnableServerlessCompute, "enable-serverless-compute", setWorkspaceWarehouseConfigReq.EnableServerlessCompute, `Enable Serverless compute for SQL warehouses.`) + cmd.Flags().BoolVar(&setWorkspaceWarehouseConfigReq.EnableServerlessCompute, "enable-serverless-compute", setWorkspaceWarehouseConfigReq.EnableServerlessCompute, `Deprecated: only setting this to true is allowed.`) // TODO: array: enabled_warehouse_types // TODO: complex arg: global_param cmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.GoogleServiceAccount, "google-service-account", setWorkspaceWarehouseConfigReq.GoogleServiceAccount, `GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage.`) diff --git a/go.mod b/go.mod index eaa4ccd513..061dcc6d63 100644 --- a/go.mod +++ b/go.mod @@ -12,7 +12,7 @@ require ( github.com/charmbracelet/bubbletea v1.3.10 // MIT github.com/charmbracelet/huh v1.0.0 github.com/charmbracelet/lipgloss v1.1.0 // MIT - github.com/databricks/databricks-sdk-go v0.120.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.126.0 // Apache 2.0 github.com/fatih/color v1.18.0 // MIT github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/gorilla/mux v1.8.1 // BSD 3-Clause diff --git a/go.sum b/go.sum index 8f05b531c3..1c5ad652ca 100644 --- a/go.sum +++ b/go.sum @@ -75,8 +75,8 @@ github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= -github.com/databricks/databricks-sdk-go v0.120.0 h1:XLEoLeVUB/MFygyklLiB2HtQTeaULnfr1RyGtYcl2gQ= 
-github.com/databricks/databricks-sdk-go v0.120.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE= +github.com/databricks/databricks-sdk-go v0.126.0 h1:431TkvShD8e70Le1zdaeo+AhMVoCqZG2sYdO+lfoSF4= +github.com/databricks/databricks-sdk-go v0.126.0/go.mod h1:hWoHnHbNLjPKiTm5K/7bcIv3J3Pkgo5x9pPzh8K3RVE= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/internal/genkit/tagging.py b/internal/genkit/tagging.py old mode 100644 new mode 100755 diff --git a/libs/agent/agent.go b/libs/agent/agent.go deleted file mode 100644 index 9e206e3cfd..0000000000 --- a/libs/agent/agent.go +++ /dev/null @@ -1,88 +0,0 @@ -package agent - -import ( - "context" - - "github.com/databricks/cli/libs/env" -) - -// Product name constants -const ( - Antigravity = "antigravity" - ClaudeCode = "claude-code" - Cline = "cline" - Codex = "codex" - Cursor = "cursor" - GeminiCLI = "gemini-cli" - OpenCode = "opencode" -) - -// knownAgents maps environment variables to product names. -// Adding a new agent only requires a new entry here and a new constant above. -// -// References for each environment variable: -// - ANTIGRAVITY_AGENT: Closed source. Verified locally that Google Antigravity sets this variable. -// - CLAUDECODE: https://github.com/anthropics/claude-code (open source npm package, sets CLAUDECODE=1) -// - CLINE_ACTIVE: https://github.com/cline/cline (shipped in v3.24.0, see also https://github.com/cline/cline/discussions/5366) -// - CODEX_CI: https://github.com/openai/codex/blob/main/codex-rs/core/src/unified_exec/process_manager.rs (part of UNIFIED_EXEC_ENV array) -// - CURSOR_AGENT: Closed source. 
Referenced in https://gist.github.com/johnlindquist/9a90c5f1aedef0477c60d0de4171da3f -// - GEMINI_CLI: https://google-gemini.github.io/gemini-cli/docs/tools/shell.html ("sets the GEMINI_CLI=1 environment variable") -// - OPENCODE: https://github.com/opencode-ai/opencode (open source, sets OPENCODE=1) -var knownAgents = []struct { - envVar string - product string -}{ - {"ANTIGRAVITY_AGENT", Antigravity}, - {"CLAUDECODE", ClaudeCode}, - {"CLINE_ACTIVE", Cline}, - {"CODEX_CI", Codex}, - {"CURSOR_AGENT", Cursor}, - {"GEMINI_CLI", GeminiCLI}, - {"OPENCODE", OpenCode}, -} - -// productKeyType is a package-local context key with zero size. -type productKeyType struct{} - -var productKey productKeyType - -// detect performs the actual detection logic. -// Returns product name string or empty string if detection is ambiguous. -// Only returns a product if exactly one agent is detected. -func detect(ctx context.Context) string { - var detected []string - for _, a := range knownAgents { - if env.Get(ctx, a.envVar) != "" { - detected = append(detected, a.product) - } - } - - // Only return a product if exactly one agent is detected - if len(detected) == 1 { - return detected[0] - } - - return "" -} - -// Detect detects the agent and stores it in context. -// It returns a new context with the detection result set. -func Detect(ctx context.Context) context.Context { - return context.WithValue(ctx, productKey, detect(ctx)) -} - -// Mock is a helper for tests to mock the detection result. -func Mock(ctx context.Context, product string) context.Context { - return context.WithValue(ctx, productKey, product) -} - -// Product returns the detected agent product name from context. -// Returns empty string if no agent was detected. -// Panics if called before Detect() or Mock(). 
-func Product(ctx context.Context) string { - v := ctx.Value(productKey) - if v == nil { - panic("agent.Product called without calling agent.Detect first") - } - return v.(string) -} diff --git a/libs/agent/agent_test.go b/libs/agent/agent_test.go deleted file mode 100644 index f25db02bad..0000000000 --- a/libs/agent/agent_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package agent - -import ( - "context" - "testing" - - "github.com/databricks/cli/libs/env" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func clearAllAgentEnvVars(ctx context.Context) context.Context { - for _, a := range knownAgents { - ctx = env.Set(ctx, a.envVar, "") - } - return ctx -} - -func TestDetectEachAgent(t *testing.T) { - for _, a := range knownAgents { - t.Run(a.product, func(t *testing.T) { - ctx := clearAllAgentEnvVars(t.Context()) - ctx = env.Set(ctx, a.envVar, "1") - - assert.Equal(t, a.product, detect(ctx)) - }) - } -} - -func TestDetectViaContext(t *testing.T) { - ctx := clearAllAgentEnvVars(t.Context()) - ctx = env.Set(ctx, knownAgents[0].envVar, "1") - - ctx = Detect(ctx) - - assert.Equal(t, knownAgents[0].product, Product(ctx)) -} - -func TestDetectNoAgent(t *testing.T) { - ctx := clearAllAgentEnvVars(t.Context()) - - ctx = Detect(ctx) - - assert.Equal(t, "", Product(ctx)) -} - -func TestDetectMultipleAgents(t *testing.T) { - ctx := clearAllAgentEnvVars(t.Context()) - for _, a := range knownAgents { - ctx = env.Set(ctx, a.envVar, "1") - } - - assert.Equal(t, "", detect(ctx)) -} - -func TestProductCalledBeforeDetect(t *testing.T) { - ctx := t.Context() - - require.Panics(t, func() { - Product(ctx) - }) -} - -func TestMock(t *testing.T) { - ctx := t.Context() - ctx = Mock(ctx, "test-agent") - - assert.Equal(t, "test-agent", Product(ctx)) -} diff --git a/python/databricks/bundles/jobs/__init__.py b/python/databricks/bundles/jobs/__init__.py index 3e98d2acd6..4b9bd26d8b 100644 --- a/python/databricks/bundles/jobs/__init__.py +++ 
b/python/databricks/bundles/jobs/__init__.py @@ -2,6 +2,12 @@ "Adlsgen2Info", "Adlsgen2InfoDict", "Adlsgen2InfoParam", + "AlertTask", + "AlertTaskDict", + "AlertTaskParam", + "AlertTaskSubscriber", + "AlertTaskSubscriberDict", + "AlertTaskSubscriberParam", "AuthenticationMethod", "AuthenticationMethodParam", "AutoScale", @@ -293,6 +299,16 @@ Adlsgen2InfoDict, Adlsgen2InfoParam, ) +from databricks.bundles.jobs._models.alert_task import ( + AlertTask, + AlertTaskDict, + AlertTaskParam, +) +from databricks.bundles.jobs._models.alert_task_subscriber import ( + AlertTaskSubscriber, + AlertTaskSubscriberDict, + AlertTaskSubscriberParam, +) from databricks.bundles.jobs._models.authentication_method import ( AuthenticationMethod, AuthenticationMethodParam, diff --git a/python/databricks/bundles/jobs/_models/alert_task.py b/python/databricks/bundles/jobs/_models/alert_task.py new file mode 100644 index 0000000000..b044b79778 --- /dev/null +++ b/python/databricks/bundles/jobs/_models/alert_task.py @@ -0,0 +1,80 @@ +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrList, VariableOrOptional +from databricks.bundles.jobs._models.alert_task_subscriber import ( + AlertTaskSubscriber, + AlertTaskSubscriberParam, +) + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class AlertTask: + """""" + + alert_id: VariableOrOptional[str] = None + """ + The alert_id is the canonical identifier of the alert. + """ + + subscribers: VariableOrList[AlertTaskSubscriber] = field(default_factory=list) + """ + The subscribers receive alert evaluation result notifications after the alert task is completed. + The number of subscriptions is limited to 100. 
+ """ + + warehouse_id: VariableOrOptional[str] = None + """ + The warehouse_id identifies the warehouse settings used by the alert task. + """ + + workspace_path: VariableOrOptional[str] = None + """ + The workspace_path is the path to the alert file in the workspace. The path: + * must start with "/Workspace" + * must be a normalized path. + User has to select only one of alert_id or workspace_path to identify the alert. + """ + + @classmethod + def from_dict(cls, value: "AlertTaskDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "AlertTaskDict": + return _transform_to_json_value(self) # type:ignore + + +class AlertTaskDict(TypedDict, total=False): + """""" + + alert_id: VariableOrOptional[str] + """ + The alert_id is the canonical identifier of the alert. + """ + + subscribers: VariableOrList[AlertTaskSubscriberParam] + """ + The subscribers receive alert evaluation result notifications after the alert task is completed. + The number of subscriptions is limited to 100. + """ + + warehouse_id: VariableOrOptional[str] + """ + The warehouse_id identifies the warehouse settings used by the alert task. + """ + + workspace_path: VariableOrOptional[str] + """ + The workspace_path is the path to the alert file in the workspace. The path: + * must start with "/Workspace" + * must be a normalized path. + User has to select only one of alert_id or workspace_path to identify the alert. 
+ """ + + +AlertTaskParam = AlertTaskDict | AlertTask diff --git a/python/databricks/bundles/jobs/_models/alert_task_subscriber.py b/python/databricks/bundles/jobs/_models/alert_task_subscriber.py new file mode 100644 index 0000000000..a66936f9b4 --- /dev/null +++ b/python/databricks/bundles/jobs/_models/alert_task_subscriber.py @@ -0,0 +1,45 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class AlertTaskSubscriber: + """ + Represents a subscriber that will receive alert notifications. + A subscriber can be either a user (via email) or a notification destination (via destination_id). + """ + + destination_id: VariableOrOptional[str] = None + + user_name: VariableOrOptional[str] = None + """ + A valid workspace email address. + """ + + @classmethod + def from_dict(cls, value: "AlertTaskSubscriberDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "AlertTaskSubscriberDict": + return _transform_to_json_value(self) # type:ignore + + +class AlertTaskSubscriberDict(TypedDict, total=False): + """""" + + destination_id: VariableOrOptional[str] + + user_name: VariableOrOptional[str] + """ + A valid workspace email address. 
+ """ + + +AlertTaskSubscriberParam = AlertTaskSubscriberDict | AlertTaskSubscriber diff --git a/python/databricks/bundles/jobs/_models/environment.py b/python/databricks/bundles/jobs/_models/environment.py index 3bf806c7f4..64db81e5e5 100644 --- a/python/databricks/bundles/jobs/_models/environment.py +++ b/python/databricks/bundles/jobs/_models/environment.py @@ -13,7 +13,7 @@ class Environment: """ The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. - In this minimal environment spec, only pip dependencies are supported. + In this minimal environment spec, only pip and java dependencies are supported. """ base_environment: VariableOrOptional[str] = None @@ -34,7 +34,7 @@ class Environment: environment_version: VariableOrOptional[str] = None """ - Required. Environment version used by the environment. + Either `environment_version` or `base_environment` needs to be provided. Environment version used by the environment. Each version comes with a specific Python version and a set of Python packages. The version is a string, consisting of an integer. """ @@ -70,7 +70,7 @@ class EnvironmentDict(TypedDict, total=False): environment_version: VariableOrOptional[str] """ - Required. Environment version used by the environment. + Either `environment_version` or `base_environment` needs to be provided. Environment version used by the environment. Each version comes with a specific Python version and a set of Python packages. The version is a string, consisting of an integer. 
""" diff --git a/python/databricks/bundles/jobs/_models/git_source.py b/python/databricks/bundles/jobs/_models/git_source.py index 539a192088..a2c0969198 100644 --- a/python/databricks/bundles/jobs/_models/git_source.py +++ b/python/databricks/bundles/jobs/_models/git_source.py @@ -50,9 +50,6 @@ class GitSource: """ sparse_checkout: VariableOrOptional[SparseCheckout] = None - """ - :meta private: [EXPERIMENTAL] - """ @classmethod def from_dict(cls, value: "GitSourceDict") -> "Self": @@ -91,9 +88,6 @@ class GitSourceDict(TypedDict, total=False): """ sparse_checkout: VariableOrOptional[SparseCheckoutParam] - """ - :meta private: [EXPERIMENTAL] - """ GitSourceParam = GitSourceDict | GitSource diff --git a/python/databricks/bundles/jobs/_models/sparse_checkout.py b/python/databricks/bundles/jobs/_models/sparse_checkout.py index af68734ad0..47e41f54bb 100644 --- a/python/databricks/bundles/jobs/_models/sparse_checkout.py +++ b/python/databricks/bundles/jobs/_models/sparse_checkout.py @@ -11,9 +11,7 @@ @dataclass(kw_only=True) class SparseCheckout: - """ - :meta private: [EXPERIMENTAL] - """ + """""" patterns: VariableOrList[str] = field(default_factory=list) """ diff --git a/python/databricks/bundles/jobs/_models/task.py b/python/databricks/bundles/jobs/_models/task.py index 9e2c99517e..4c1e2d221c 100644 --- a/python/databricks/bundles/jobs/_models/task.py +++ b/python/databricks/bundles/jobs/_models/task.py @@ -8,6 +8,7 @@ VariableOrList, VariableOrOptional, ) +from databricks.bundles.jobs._models.alert_task import AlertTask, AlertTaskParam from databricks.bundles.jobs._models.clean_rooms_notebook_task import ( CleanRoomsNotebookTask, CleanRoomsNotebookTaskParam, @@ -103,6 +104,11 @@ class Task: On Update or Reset, this field is used to reference the tasks to be updated or reset. 
""" + alert_task: VariableOrOptional[AlertTask] = None + """ + New alert v2 task + """ + clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTask] = None """ The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook @@ -310,6 +316,11 @@ class TaskDict(TypedDict, total=False): On Update or Reset, this field is used to reference the tasks to be updated or reset. """ + alert_task: VariableOrOptional[AlertTaskParam] + """ + New alert v2 task + """ + clean_rooms_notebook_task: VariableOrOptional[CleanRoomsNotebookTaskParam] """ The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook diff --git a/python/databricks/bundles/pipelines/__init__.py b/python/databricks/bundles/pipelines/__init__.py index 2aef912fa7..49a37a83de 100644 --- a/python/databricks/bundles/pipelines/__init__.py +++ b/python/databricks/bundles/pipelines/__init__.py @@ -21,6 +21,11 @@ "ConnectionParameters", "ConnectionParametersDict", "ConnectionParametersParam", + "ConnectorType", + "ConnectorTypeParam", + "DataStagingOptions", + "DataStagingOptionsDict", + "DataStagingOptionsParam", "DayOfWeek", "DayOfWeekParam", "DbfsStorageInfo", @@ -194,6 +199,15 @@ ConnectionParametersDict, ConnectionParametersParam, ) +from databricks.bundles.pipelines._models.connector_type import ( + ConnectorType, + ConnectorTypeParam, +) +from databricks.bundles.pipelines._models.data_staging_options import ( + DataStagingOptions, + DataStagingOptionsDict, + DataStagingOptionsParam, +) from databricks.bundles.pipelines._models.day_of_week import DayOfWeek, DayOfWeekParam from databricks.bundles.pipelines._models.dbfs_storage_info import ( DbfsStorageInfo, diff --git a/python/databricks/bundles/pipelines/_models/connector_type.py b/python/databricks/bundles/pipelines/_models/connector_type.py new file mode 100644 index 0000000000..1b2a205419 --- /dev/null +++ b/python/databricks/bundles/pipelines/_models/connector_type.py @@ -0,0 +1,18 @@ +from 
enum import Enum +from typing import Literal + + +class ConnectorType(Enum): + """ + :meta private: [EXPERIMENTAL] + + For certain database sources LakeFlow Connect offers both query based and cdc + ingestion, ConnectorType can be used to convey the type of ingestion. + If connection_name is provided for database sources, we default to Query Based ingestion + """ + + CDC = "CDC" + QUERY_BASED = "QUERY_BASED" + + +ConnectorTypeParam = Literal["CDC", "QUERY_BASED"] | ConnectorType diff --git a/python/databricks/bundles/pipelines/_models/data_staging_options.py b/python/databricks/bundles/pipelines/_models/data_staging_options.py new file mode 100644 index 0000000000..7a3ec208c3 --- /dev/null +++ b/python/databricks/bundles/pipelines/_models/data_staging_options.py @@ -0,0 +1,70 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING, TypedDict + +from databricks.bundles.core._transform import _transform +from databricks.bundles.core._transform_to_json import _transform_to_json_value +from databricks.bundles.core._variable import VariableOr, VariableOrOptional + +if TYPE_CHECKING: + from typing_extensions import Self + + +@dataclass(kw_only=True) +class DataStagingOptions: + """ + :meta private: [EXPERIMENTAL] + + Location of staged data storage + """ + + catalog_name: VariableOr[str] + """ + (Required, Immutable) The name of the catalog for the connector's staging storage location. + """ + + schema_name: VariableOr[str] + """ + (Required, Immutable) The name of the schema for the connector's staging storage location. + """ + + volume_name: VariableOrOptional[str] = None + """ + (Optional) The Unity Catalog-compatible name for the storage location. + This is the volume to use for the data that is extracted by the connector. + Spark Declarative Pipelines system will automatically create the volume under the catalog and schema. 
+ For Combined Cdc Managed Ingestion pipelines default name for the volume would be : + __databricks_ingestion_gateway_staging_data-$pipelineId + """ + + @classmethod + def from_dict(cls, value: "DataStagingOptionsDict") -> "Self": + return _transform(cls, value) + + def as_dict(self) -> "DataStagingOptionsDict": + return _transform_to_json_value(self) # type:ignore + + +class DataStagingOptionsDict(TypedDict, total=False): + """""" + + catalog_name: VariableOr[str] + """ + (Required, Immutable) The name of the catalog for the connector's staging storage location. + """ + + schema_name: VariableOr[str] + """ + (Required, Immutable) The name of the schema for the connector's staging storage location. + """ + + volume_name: VariableOrOptional[str] + """ + (Optional) The Unity Catalog-compatible name for the storage location. + This is the volume to use for the data that is extracted by the connector. + Spark Declarative Pipelines system will automatically create the volume under the catalog and schema. 
+ For Combined Cdc Managed Ingestion pipelines default name for the volume would be : + __databricks_ingestion_gateway_staging_data-$pipelineId + """ + + +DataStagingOptionsParam = DataStagingOptionsDict | DataStagingOptions diff --git a/python/databricks/bundles/pipelines/_models/ingestion_config.py b/python/databricks/bundles/pipelines/_models/ingestion_config.py index c452222df9..988227c43e 100644 --- a/python/databricks/bundles/pipelines/_models/ingestion_config.py +++ b/python/databricks/bundles/pipelines/_models/ingestion_config.py @@ -4,7 +4,10 @@ from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value from databricks.bundles.core._variable import VariableOrOptional -from databricks.bundles.pipelines._models.report_spec import ReportSpec, ReportSpecParam +from databricks.bundles.pipelines._models.report_spec import ( + ReportSpec, + ReportSpecParam, +) from databricks.bundles.pipelines._models.schema_spec import SchemaSpec, SchemaSpecParam from databricks.bundles.pipelines._models.table_spec import TableSpec, TableSpecParam diff --git a/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py b/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py index 91af6cdb57..d7c9840031 100644 --- a/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +++ b/python/databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py @@ -4,6 +4,14 @@ from databricks.bundles.core._transform import _transform from databricks.bundles.core._transform_to_json import _transform_to_json_value from databricks.bundles.core._variable import VariableOrList, VariableOrOptional +from databricks.bundles.pipelines._models.connector_type import ( + ConnectorType, + ConnectorTypeParam, +) +from databricks.bundles.pipelines._models.data_staging_options import ( + DataStagingOptions, + DataStagingOptionsParam, +) from 
databricks.bundles.pipelines._models.ingestion_config import ( IngestionConfig, IngestionConfigParam, @@ -41,6 +49,23 @@ class IngestionPipelineDefinition: Ingestion Pipeline with Gateway pipeline. """ + connector_type: VariableOrOptional[ConnectorType] = None + """ + :meta private: [EXPERIMENTAL] + + (Optional) Connector Type for sources. Ex: CDC, Query Based. + """ + + data_staging_options: VariableOrOptional[DataStagingOptions] = None + """ + :meta private: [EXPERIMENTAL] + + (Optional) Location of staged data storage. This is required for migration from Cdc Managed Ingestion Pipeline + with Gateway pipeline to Combined Cdc Managed Ingestion Pipeline. + If not specified, the volume for staged data will be created in catalog and schema/target specified in the + top level pipeline definition. + """ + full_refresh_window: VariableOrOptional[OperationTimeWindow] = None """ (Optional) A window that specifies a set of time ranges for snapshot queries in CDC. @@ -107,6 +132,23 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False): Ingestion Pipeline with Gateway pipeline. """ + connector_type: VariableOrOptional[ConnectorTypeParam] + """ + :meta private: [EXPERIMENTAL] + + (Optional) Connector Type for sources. Ex: CDC, Query Based. + """ + + data_staging_options: VariableOrOptional[DataStagingOptionsParam] + """ + :meta private: [EXPERIMENTAL] + + (Optional) Location of staged data storage. This is required for migration from Cdc Managed Ingestion Pipeline + with Gateway pipeline to Combined Cdc Managed Ingestion Pipeline. + If not specified, the volume for staged data will be created in catalog and schema/target specified in the + top level pipeline definition. + """ + full_refresh_window: VariableOrOptional[OperationTimeWindowParam] """ (Optional) A window that specifies a set of time ranges for snapshot queries in CDC. 
From 11c2cc10ab02584afaa32f2994e81bd5561f3409 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 13:27:35 +0100 Subject: [PATCH 02/17] Revert codegen-induced action version downgrades make generate overwrote tagging.yml with older action versions from the universe template. Revert to keep the Renovate-bumped versions (v3/v6). Also revert a spurious mode change on tagging.py. Co-authored-by: Isaac --- .github/workflows/tagging.yml | 4 ++-- internal/genkit/tagging.py | 0 2 files changed, 2 insertions(+), 2 deletions(-) mode change 100755 => 100644 .github/workflows/tagging.yml mode change 100755 => 100644 internal/genkit/tagging.py diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml old mode 100755 new mode 100644 index 9bca7399f3..0752107ac9 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -34,13 +34,13 @@ jobs: steps: - name: Generate GitHub App Token id: generate-token - uses: actions/create-github-app-token@v2 + uses: actions/create-github-app-token@v3 with: app-id: ${{ secrets.DECO_SDK_TAGGING_APP_ID }} private-key: ${{ secrets.DECO_SDK_TAGGING_PRIVATE_KEY }} - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 token: ${{ steps.generate-token.outputs.token }} diff --git a/internal/genkit/tagging.py b/internal/genkit/tagging.py old mode 100755 new mode 100644 From ef2d24db05ec026b302b8ceedb3bb4a33e9b92f3 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 13:45:10 +0100 Subject: [PATCH 03/17] Revert "Revert codegen-induced action version downgrades" This reverts commit 11c2cc10ab02584afaa32f2994e81bd5561f3409. 
--- .github/workflows/tagging.yml | 4 ++-- internal/genkit/tagging.py | 0 2 files changed, 2 insertions(+), 2 deletions(-) mode change 100644 => 100755 .github/workflows/tagging.yml mode change 100644 => 100755 internal/genkit/tagging.py diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml old mode 100644 new mode 100755 index 0752107ac9..9bca7399f3 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -34,13 +34,13 @@ jobs: steps: - name: Generate GitHub App Token id: generate-token - uses: actions/create-github-app-token@v3 + uses: actions/create-github-app-token@v2 with: app-id: ${{ secrets.DECO_SDK_TAGGING_APP_ID }} private-key: ${{ secrets.DECO_SDK_TAGGING_PRIVATE_KEY }} - name: Checkout repository - uses: actions/checkout@v6 + uses: actions/checkout@v4 with: fetch-depth: 0 token: ${{ steps.generate-token.outputs.token }} diff --git a/internal/genkit/tagging.py b/internal/genkit/tagging.py old mode 100644 new mode 100755 From 967eccbbc3242703a65f955283a6270d8a9d13f8 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 13:47:29 +0100 Subject: [PATCH 04/17] Fix auth prompt logic to use HostType() and synthesize errors when prompting is unavailable The SDK v0.126.0 bump replaced ErrNotAccountClient/ErrNotWorkspaceClient sentinel errors with host metadata resolution. The initial migration used ConfigType() which misclassifies unified hosts as InvalidConfig. Switch to HostType() and handle UnifiedHost explicitly. Also fix the case where needsPrompt is true but prompting is disabled: synthesize the appropriate sentinel error instead of silently returning a wrong-type client. 
Co-authored-by: Isaac --- cmd/root/auth.go | 71 +++++++++++++++++++++++++++++++------------ cmd/root/auth_test.go | 49 ++++++++++++++++++++++++++++- 2 files changed, 100 insertions(+), 20 deletions(-) diff --git a/cmd/root/auth.go b/cmd/root/auth.go index ec8fc17a87..d21fda26c6 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -56,15 +56,31 @@ func accountClientOrPrompt(ctx context.Context, cfg *config.Config, allowPrompt err = a.Config.Authenticate(emptyHttpRequest(ctx)) } - // Determine if we should prompt for a profile. The SDK no longer returns - // ErrNotAccountClient from NewAccountClient (as of v0.125.0, host-type - // validation was removed in favor of host metadata resolution). Use - // ConfigType() to detect wrong host type. - needsPrompt := cfg.ConfigType() != config.AccountConfig || - cfg.AccountID == "" || - (err != nil && errors.Is(err, config.ErrCannotConfigureDefault)) - - if !needsPrompt || !allowPrompt || !cmdio.IsPromptSupported(ctx) { + // Determine if we should prompt for a profile based on host type. + // The SDK no longer returns ErrNotAccountClient from NewAccountClient + // (as of v0.125.0, host-type validation was removed in favor of host + // metadata resolution). Use HostType() to detect the wrong host type. + var needsPrompt bool + switch cfg.HostType() { + case config.AccountHost, config.UnifiedHost: + // Valid host type for account client, but still need account ID. + needsPrompt = cfg.AccountID == "" + default: + // WorkspaceHost or unknown: wrong type for account client. 
+ needsPrompt = true + } + if !needsPrompt && err != nil && errors.Is(err, config.ErrCannotConfigureDefault) { + needsPrompt = true + } + + if !needsPrompt { + return a, err + } + + if !allowPrompt || !cmdio.IsPromptSupported(ctx) { + if err == nil { + err = databricks.ErrNotAccountClient + } return a, err } @@ -90,9 +106,9 @@ func MustAnyClient(cmd *cobra.Command, args []string) (bool, error) { return false, nil } - // If the error is other than "not a workspace client error" or "no workspace profiles", - // return it because configuration is for workspace client - // and we don't want to try to create an account client. + // If the error indicates a wrong config type (workspace host used for account client, + // or config type mismatch detected by workspaceClientOrPrompt), fall through to try + // account client. if !errors.Is(werr, databricks.ErrNotWorkspaceClient) && !errors.As(werr, &ErrNoWorkspaceProfiles{}) { return false, werr } @@ -157,14 +173,31 @@ func workspaceClientOrPrompt(ctx context.Context, cfg *config.Config, allowPromp err = w.Config.Authenticate(emptyHttpRequest(ctx)) } - // Determine if we should prompt for a profile. The SDK no longer returns - // ErrNotWorkspaceClient from NewWorkspaceClient (as of v0.125.0, host-type - // validation was removed in favor of host metadata resolution). Use - // ConfigType() to detect wrong host type. - needsPrompt := cfg.ConfigType() != config.WorkspaceConfig || - (err != nil && errors.Is(err, config.ErrCannotConfigureDefault)) + // Determine if we should prompt for a profile based on host type. + // The SDK no longer returns ErrNotWorkspaceClient from NewWorkspaceClient + // (as of v0.125.0, host-type validation was removed in favor of host + // metadata resolution). Use HostType() to detect the wrong host type. + var needsPrompt bool + switch cfg.HostType() { + case config.WorkspaceHost, config.UnifiedHost: + // Both workspace and unified hosts can serve workspace APIs. 
+ needsPrompt = false + default: + // AccountHost or unknown: wrong type for workspace client. + needsPrompt = true + } + if !needsPrompt && err != nil && errors.Is(err, config.ErrCannotConfigureDefault) { + needsPrompt = true + } + + if !needsPrompt { + return w, err + } - if !needsPrompt || !allowPrompt || !cmdio.IsPromptSupported(ctx) { + if !allowPrompt || !cmdio.IsPromptSupported(ctx) { + if err == nil { + err = databricks.ErrNotWorkspaceClient + } return w, err } diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go index dc6930b9fa..edffdab3cb 100644 --- a/cmd/root/auth_test.go +++ b/cmd/root/auth_test.go @@ -11,13 +11,14 @@ import ( "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/cmdctx" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/config" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) // noNetworkTransport prevents real HTTP calls in auth tests. -// Returns 404 for host metadata lookups; 200 for everything else. +// Returns 404 for all requests, preventing real HTTP calls during auth tests. 
var noNetworkTransport = roundTripperFunc(func(r *http.Request) (*http.Response, error) { return &http.Response{StatusCode: http.StatusNotFound, Body: http.NoBody}, nil }) @@ -426,6 +427,52 @@ token = flag-token } } +func TestAccountClientOrPromptReturnsErrorForWrongHostType(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("PATH", "") + + cfg := &config.Config{ + Host: "https://adb-1234567.89.azuredatabricks.net/", + Token: "foobar", + HTTPTransport: noNetworkTransport, + } + + a, err := accountClientOrPrompt(t.Context(), cfg, false) + assert.NotNil(t, a) + assert.ErrorIs(t, err, databricks.ErrNotAccountClient) +} + +func TestWorkspaceClientOrPromptReturnsErrorForWrongHostType(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("PATH", "") + + cfg := &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + AccountID: "1234", + Token: "foobar", + HTTPTransport: noNetworkTransport, + } + + w, err := workspaceClientOrPrompt(t.Context(), cfg, false) + assert.NotNil(t, w) + assert.ErrorIs(t, err, databricks.ErrNotWorkspaceClient) +} + +func TestAccountClientOrPromptReturnsErrorForMissingAccountID(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("PATH", "") + + cfg := &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + Token: "foobar", + HTTPTransport: noNetworkTransport, + } + + a, err := accountClientOrPrompt(t.Context(), cfg, false) + assert.NotNil(t, a) + assert.ErrorIs(t, err, databricks.ErrNotAccountClient) +} + func TestMustWorkspaceClientWithoutConfiguredDefaultFallsBackToDefaultSection(t *testing.T) { testutil.CleanupEnvironment(t) From 5c746095ef6910f6b6ab5ab159739a04b3cb8fdf Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 13:53:18 +0100 Subject: [PATCH 05/17] Fix auth prompt logic for SDK v0.126.0 host-type changes The SDK removed host-type validation from NewAccountClient/NewWorkspaceClient in favor of host metadata resolution. 
This broke the CLI's auth prompt logic which relied on ErrNotAccountClient/ErrNotWorkspaceClient sentinels. Fixes: - Use cfg.HostType() instead of cfg.ConfigType() to detect wrong host type, with explicit handling for UnifiedHost - Synthesize ErrNotAccountClient/ErrNotWorkspaceClient when the config is for the wrong host type and prompting is unavailable, preserving the MustAnyClient fallthrough and giving actionable errors - Add tests for wrong-type configs with prompting disabled - Fix noNetworkTransport comment and MustAnyClient comment Also fix whitespace in generated environments.go. Co-authored-by: Isaac --- cmd/workspace/environments/environments.go | 24 +++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/cmd/workspace/environments/environments.go b/cmd/workspace/environments/environments.go index e227871d7a..6afb38437b 100755 --- a/cmd/workspace/environments/environments.go +++ b/cmd/workspace/environments/environments.go @@ -26,7 +26,7 @@ func New() *cobra.Command { Use: "environments", Short: `APIs to manage environment resources.`, Long: `APIs to manage environment resources. - + The Environments API provides management capabilities for different types of environments including workspace-level base environments that define the environment version and dependencies to be used in serverless notebooks and @@ -87,7 +87,7 @@ func newCreateWorkspaceBaseEnvironment() *cobra.Command { cmd.Use = "create-workspace-base-environment DISPLAY_NAME" cmd.Short = `Create a workspace base environment.` cmd.Long = `Create a workspace base environment. - + Creates a new WorkspaceBaseEnvironment. This is a long-running operation. 
The operation will asynchronously generate a materialized environment to optimize dependency resolution and is only marked as done when the materialized @@ -203,7 +203,7 @@ func newDeleteWorkspaceBaseEnvironment() *cobra.Command { cmd.Use = "delete-workspace-base-environment NAME" cmd.Short = `Delete a workspace base environment.` cmd.Long = `Delete a workspace base environment. - + Deletes a WorkspaceBaseEnvironment. Deleting a base environment may impact linked notebooks and jobs. This operation is irreversible and should be performed only when you are certain the environment is no longer needed. @@ -262,7 +262,7 @@ func newGetDefaultWorkspaceBaseEnvironment() *cobra.Command { cmd.Use = "get-default-workspace-base-environment NAME" cmd.Short = `Get the default workspace base environment configuration.` cmd.Long = `Get the default workspace base environment configuration. - + Gets the default WorkspaceBaseEnvironment configuration for the workspace. Returns the current default base environment settings for both CPU and GPU compute. @@ -321,7 +321,7 @@ func newGetOperation() *cobra.Command { cmd.Use = "get-operation NAME" cmd.Short = `Get the status of a long-running operation.` cmd.Long = `Get the status of a long-running operation. - + Gets the status of a long-running operation. Clients can use this method to poll the operation result. @@ -378,7 +378,7 @@ func newGetWorkspaceBaseEnvironment() *cobra.Command { cmd.Use = "get-workspace-base-environment NAME" cmd.Short = `Get a workspace base environment.` cmd.Long = `Get a workspace base environment. - + Retrieves a WorkspaceBaseEnvironment by its name. Arguments: @@ -438,7 +438,7 @@ func newListWorkspaceBaseEnvironments() *cobra.Command { cmd.Use = "list-workspace-base-environments" cmd.Short = `List workspace base environments.` cmd.Long = `List workspace base environments. 
- + Lists all WorkspaceBaseEnvironments in the workspace.` cmd.Annotations = make(map[string]string) @@ -492,7 +492,7 @@ func newRefreshWorkspaceBaseEnvironment() *cobra.Command { cmd.Use = "refresh-workspace-base-environment NAME" cmd.Short = `Refresh materialized workspace base environment.` cmd.Long = `Refresh materialized workspace base environment. - + Refreshes the materialized environment for a WorkspaceBaseEnvironment. This is a long-running operation. The operation will asynchronously regenerate the materialized environment and is only marked as done when the materialized @@ -597,7 +597,7 @@ func newUpdateDefaultWorkspaceBaseEnvironment() *cobra.Command { cmd.Use = "update-default-workspace-base-environment NAME UPDATE_MASK" cmd.Short = `Update the default workspace base environment configuration.` cmd.Long = `Update the default workspace base environment configuration. - + Updates the default WorkspaceBaseEnvironment configuration for the workspace. Sets the specified base environments as the workspace defaults for CPU and/or GPU compute. @@ -609,7 +609,7 @@ func newUpdateDefaultWorkspaceBaseEnvironment() *cobra.Command { for multiple fields (no space). The special value '*' indicates that all fields should be updated (full replacement). Valid field paths: cpu_workspace_base_environment, gpu_workspace_base_environment - + To unset one or both defaults, include the field path(s) in the mask and omit them from the request body. To unset both, you must list both paths explicitly — the wildcard '*' cannot be used to unset fields.` @@ -694,7 +694,7 @@ func newUpdateWorkspaceBaseEnvironment() *cobra.Command { cmd.Use = "update-workspace-base-environment NAME DISPLAY_NAME" cmd.Short = `Update a workspace base environment.` cmd.Long = `Update a workspace base environment. - + Updates an existing WorkspaceBaseEnvironment. This is a long-running operation. 
The operation will asynchronously regenerate the materialized environment and is only marked as done when the materialized environment has @@ -707,7 +707,7 @@ func newUpdateWorkspaceBaseEnvironment() *cobra.Command { completion using the get-operation command. Arguments: - NAME: + NAME: DISPLAY_NAME: Human-readable display name for the workspace base environment.` cmd.Annotations = make(map[string]string) From a968333823bb370100727b220f75e58d18464bc7 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 14:01:41 +0100 Subject: [PATCH 06/17] Replace deprecated cfg.NewApiClient() with HTTPClientConfigFromConfig Fixes staticcheck SA1019 lint warning. Co-authored-by: Isaac --- cmd/root/user_agent_agent_test.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cmd/root/user_agent_agent_test.go b/cmd/root/user_agent_agent_test.go index 46e7fc538e..d2dec2a2a2 100644 --- a/cmd/root/user_agent_agent_test.go +++ b/cmd/root/user_agent_agent_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/databricks/databricks-sdk-go/config" + "github.com/databricks/databricks-sdk-go/httpclient" "github.com/databricks/databricks-sdk-go/useragent" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -55,8 +56,9 @@ func captureUserAgent(t *testing.T) string { }), } - client, err := cfg.NewApiClient() + clientCfg, err := config.HTTPClientConfigFromConfig(cfg) require.NoError(t, err) + client := httpclient.NewApiClient(clientCfg) _ = client.Do(t.Context(), "GET", "/api/2.0/clusters/list") return capturedUA From 8845a36e0b3aac06f7cf4eaac0c81dfafcf7a1eb Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 14:59:08 +0100 Subject: [PATCH 07/17] Add .well-known/databricks-config handler to test server The SDK v0.126.0 resolves host metadata from /.well-known/databricks-config during config initialization. 
Register a default handler in testserver.New() so all test servers (both unit and acceptance) handle this endpoint without requiring per-test configuration. Regenerated acceptance test golden files to include the new request. Co-authored-by: Isaac --- .../bundle-with-appname/out.requests.txt | 4 + .../no-bundle-with-appname/out.requests.txt | 4 + acceptance/auth/bundle_and_profile/output.txt | 2 + .../auth/credentials/basic/out.requests.txt | 9 + .../auth/credentials/oauth/out.requests.txt | 9 + .../auth/credentials/pat/out.requests.txt | 9 + .../credentials/unified-host/out.requests.txt | 33 + .../auth/credentials/unified-host/output.txt | 7 +- .../bad_ref_string_to_int/out.requests.txt | 4 + .../change-schema-name/out.requests.txt | 8 + .../from_flag/out.requests.txt | 8 + .../target-is-passed/default/out.requests.txt | 8 + .../from_flag/out.requests.txt | 8 + .../from_flag/out.requests.txt | 8 + .../target-is-passed/default/out.requests.txt | 8 + .../from_flag/out.requests.txt | 8 + .../telemetry/deploy-error/out.requests.txt | 34 + .../telemetry/deploy-error/out.telemetry.txt | 2 +- .../bundle/telemetry/deploy-error/output.txt | 3 + .../bundle/telemetry/deploy/out.requests.txt | 1269 +++++++++++++++++ .../bundle/telemetry/deploy/out.telemetry.txt | 6 +- acceptance/bundle/telemetry/deploy/output.txt | 3 + .../pydabs/check-formatting/output.txt | 6 +- .../custom-template/out.databricks.yml | 2 +- .../custom-template/out.requests.txt | 13 +- .../telemetry/custom-template/output.txt | 22 +- .../telemetry/dbt-sql/out.databricks.yml | 2 +- .../telemetry/dbt-sql/out.requests.txt | 17 +- .../templates/telemetry/dbt-sql/output.txt | 32 +- .../default-python/out.databricks.yml | 2 +- .../telemetry/default-python/out.requests.txt | 17 +- .../telemetry/default-python/output.txt | 48 +- .../telemetry/default-sql/out.databricks.yml | 2 +- .../telemetry/default-sql/out.requests.txt | 15 +- .../telemetry/default-sql/output.txt | 28 +- acceptance/bundle/user_agent/output.txt | 
7 + .../simple/out.requests.deploy.direct.json | 9 + .../simple/out.requests.deploy.terraform.json | 9 + .../simple/out.requests.destroy.direct.json | 9 + .../out.requests.destroy.terraform.json | 9 + .../simple/out.requests.plan.direct.json | 9 + .../simple/out.requests.plan.terraform.json | 9 + .../simple/out.requests.plan2.direct.json | 9 + .../simple/out.requests.plan2.terraform.json | 9 + .../simple/out.requests.run.direct.json | 9 + .../simple/out.requests.run.terraform.json | 9 + .../simple/out.requests.summary.direct.json | 9 + .../out.requests.summary.terraform.json | 9 + .../simple/out.requests.validate.direct.json | 9 + .../out.requests.validate.terraform.json | 9 + acceptance/bundle/variables/host/output.txt | 3 + .../auth/describe/default-profile/output.txt | 3 + acceptance/cmd/auth/profiles/output.txt | 1 + acceptance/cmd/auth/switch/nominal/output.txt | 4 + .../cmd/workspace/apps/out.requests.txt | 8 + acceptance/help/output.txt | 6 + acceptance/selftest/server/out.requests.txt | 4 + acceptance/telemetry/failure/out.requests.txt | 4 + acceptance/telemetry/failure/output.txt | 9 + .../partial-success/out.requests.txt | 4 + .../telemetry/partial-success/output.txt | 9 + acceptance/telemetry/skipped/out.requests.txt | 4 + acceptance/telemetry/skipped/output.txt | 9 + acceptance/telemetry/success/out.requests.txt | 9 + acceptance/telemetry/success/output.txt | 9 + acceptance/telemetry/timeout/out.requests.txt | 4 + acceptance/telemetry/timeout/output.txt | 9 + .../jobs/create-error/out.requests.txt | 4 + .../workspace/jobs/create/out.requests.txt | 9 + .../lakeview/publish/out.requests.txt | 12 + .../create_with_provider/out.requests.txt | 16 + .../create_without_provider/out.requests.txt | 4 + .../repos/delete_by_path/out.requests.txt | 16 + .../repos/get_errors/out.requests.txt | 12 + .../workspace/repos/update/out.requests.txt | 20 + libs/testserver/server.go | 12 + 76 files changed, 1847 insertions(+), 150 deletions(-) create mode 100644 
acceptance/bundle/telemetry/deploy-error/out.requests.txt create mode 100644 acceptance/bundle/telemetry/deploy/out.requests.txt create mode 100644 acceptance/telemetry/skipped/out.requests.txt diff --git a/acceptance/apps/deploy/bundle-with-appname/out.requests.txt b/acceptance/apps/deploy/bundle-with-appname/out.requests.txt index c4c72573bf..4e01ce3270 100644 --- a/acceptance/apps/deploy/bundle-with-appname/out.requests.txt +++ b/acceptance/apps/deploy/bundle-with-appname/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/apps/test-app/deployments", diff --git a/acceptance/apps/deploy/no-bundle-with-appname/out.requests.txt b/acceptance/apps/deploy/no-bundle-with-appname/out.requests.txt index c4c72573bf..4e01ce3270 100644 --- a/acceptance/apps/deploy/no-bundle-with-appname/out.requests.txt +++ b/acceptance/apps/deploy/no-bundle-with-appname/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/apps/test-app/deployments", diff --git a/acceptance/auth/bundle_and_profile/output.txt b/acceptance/auth/bundle_and_profile/output.txt index 5501ab5a6e..c1821d061b 100644 --- a/acceptance/auth/bundle_and_profile/output.txt +++ b/acceptance/auth/bundle_and_profile/output.txt @@ -13,6 +13,7 @@ === Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle. 
>>> errcode [CLI] current-user me -p profile_name +Warn: Failed to resolve host metadata: fetching host metadata from "https://non.existing.subdomain.databricks.com/.well-known/databricks-config": Get "https://non.existing.subdomain.databricks.com/.well-known/databricks-config": tls: failed to verify certificate: x509: certificate is valid for *.cloud.databricks.com, *.databricks.com, cloud.databricks.com, marketplace.databricks.com, login.databricks.com, signup.databricks.com, signin.databricks.com, not non.existing.subdomain.databricks.com. Falling back to user config. Error: Get "https://non.existing.subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted) Exit code: 1 @@ -72,6 +73,7 @@ Validation OK! === Bundle commands load bundle configuration with -t and -p flag, validation not OK (profile host don't match bundle host) >>> errcode [CLI] bundle validate -t prod -p DEFAULT +Warn: Failed to resolve host metadata: fetching host metadata from "https://bar.com/.well-known/databricks-config": received HTML response instead of JSON. Falling back to user config. 
Error: cannot resolve bundle auth configuration: the host in the profile ([DATABRICKS_TARGET]) doesn’t match the host configured in the bundle (https://bar.com) Name: test-auth diff --git a/acceptance/auth/credentials/basic/out.requests.txt b/acceptance/auth/credentials/basic/out.requests.txt index b90fd80bbf..689113dd08 100644 --- a/acceptance/auth/credentials/basic/out.requests.txt +++ b/acceptance/auth/credentials/basic/out.requests.txt @@ -1,3 +1,12 @@ +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/auth/credentials/oauth/out.requests.txt b/acceptance/auth/credentials/oauth/out.requests.txt index e21219285f..c3b7b53a7f 100644 --- a/acceptance/auth/credentials/oauth/out.requests.txt +++ b/acceptance/auth/credentials/oauth/out.requests.txt @@ -1,3 +1,12 @@ +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "User-Agent": [ diff --git a/acceptance/auth/credentials/pat/out.requests.txt b/acceptance/auth/credentials/pat/out.requests.txt index ca2d2ec7e4..c01b146d53 100644 --- a/acceptance/auth/credentials/pat/out.requests.txt +++ b/acceptance/auth/credentials/pat/out.requests.txt @@ -1,3 +1,12 @@ +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/auth/credentials/unified-host/out.requests.txt b/acceptance/auth/credentials/unified-host/out.requests.txt index 5c7b7fc0c4..e94814526d 100644 --- a/acceptance/auth/credentials/unified-host/out.requests.txt +++ b/acceptance/auth/credentials/unified-host/out.requests.txt @@ -1,3 +1,36 @@ +{ 
+ "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "headers": { + "Authorization": [ + "Bearer dapi-unified-token" + ], + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] interactive/none auth/pat" + ], + "X-Databricks-Org-Id": [ + "[NUMID]" + ] + }, + "method": "GET", + "path": "/api/2.0/preview/scim/v2/Me" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/auth/credentials/unified-host/output.txt b/acceptance/auth/credentials/unified-host/output.txt index b78a016851..af071887d0 100644 --- a/acceptance/auth/credentials/unified-host/output.txt +++ b/acceptance/auth/credentials/unified-host/output.txt @@ -6,6 +6,7 @@ } === Without workspace_id (should error) -Error: WorkspaceID must be set when using WorkspaceClient with unified host - -Exit code: 1 +{ + "id":"[USERID]", + "userName":"[USERNAME]" +} diff --git a/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.requests.txt b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.requests.txt index 141a2247f1..c7b60b659d 100644 --- a/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.requests.txt +++ b/acceptance/bundle/resource_deps/bad_ref_string_to_int/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" diff --git a/acceptance/bundle/resources/volumes/change-schema-name/out.requests.txt b/acceptance/bundle/resources/volumes/change-schema-name/out.requests.txt index 0c03eb00fd..6356a3f868 100644 --- a/acceptance/bundle/resources/volumes/change-schema-name/out.requests.txt +++ 
b/acceptance/bundle/resources/volumes/change-schema-name/out.requests.txt @@ -1,7 +1,15 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.1/unity-catalog/volumes/main.myschema.myvolume" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.1/unity-catalog/volumes/main.myschema.mynewvolume" diff --git a/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt b/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt index c0bd5f7127..c5b36c8f9c 100644 --- a/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt +++ b/acceptance/bundle/run/inline-script/databricks-cli/profile-is-passed/from_flag/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" @@ -12,6 +16,10 @@ "path": "/oidc/v1/token", "raw_body": "grant_type=client_credentials\u0026scope=all-apis" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" diff --git a/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/default/out.requests.txt b/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/default/out.requests.txt index 6a4596ec55..7cf520fe0c 100644 --- a/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/default/out.requests.txt +++ b/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/default/out.requests.txt @@ -1,3 +1,11 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt 
b/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt index 2d8de7ca18..3a134bc775 100644 --- a/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt +++ b/acceptance/bundle/run/inline-script/databricks-cli/target-is-passed/from_flag/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" @@ -12,6 +16,10 @@ "path": "/oidc/v1/token", "raw_body": "grant_type=client_credentials\u0026scope=all-apis" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" diff --git a/acceptance/bundle/run/scripts/databricks-cli/profile-is-passed/from_flag/out.requests.txt b/acceptance/bundle/run/scripts/databricks-cli/profile-is-passed/from_flag/out.requests.txt index 63fd1beeab..90cb7d14ba 100644 --- a/acceptance/bundle/run/scripts/databricks-cli/profile-is-passed/from_flag/out.requests.txt +++ b/acceptance/bundle/run/scripts/databricks-cli/profile-is-passed/from_flag/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" @@ -21,6 +25,10 @@ "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" diff --git a/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/default/out.requests.txt b/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/default/out.requests.txt index 51bcdb4a74..93542472ac 100644 --- a/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/default/out.requests.txt +++ b/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/default/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + 
"path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ @@ -7,6 +11,10 @@ "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/from_flag/out.requests.txt b/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/from_flag/out.requests.txt index 14e9b1c59c..6ce820c124 100644 --- a/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/from_flag/out.requests.txt +++ b/acceptance/bundle/run/scripts/databricks-cli/target-is-passed/from_flag/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" @@ -21,6 +25,10 @@ "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/oidc/.well-known/oauth-authorization-server" diff --git a/acceptance/bundle/telemetry/deploy-error/out.requests.txt b/acceptance/bundle/telemetry/deploy-error/out.requests.txt new file mode 100644 index 0000000000..d28889843b --- /dev/null +++ b/acceptance/bundle/telemetry/deploy-error/out.requests.txt @@ -0,0 +1,34 @@ +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/preview/scim/v2/Me" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" + ] + }, + "method": "POST", + "path": "/telemetry-ext", + 
"body": { + "uploadTime": [UNIX_TIME_MILLIS], + "items": [], + "protoLogs": [ + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":23,\"exit_code\":1},\"bundle_deploy_event\":{\"resource_count\":0,\"resource_job_count\":0,\"resource_pipeline_count\":0,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0}}}}" + ] + } +} diff --git a/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt b/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt index b3eff6a16f..284c95969a 100644 --- a/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt +++ b/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt @@ -1,6 +1,6 @@ { "execution_context": { - "cmd_exec_id": "[CMD-EXEC-ID]", + "cmd_exec_id": "[UUID]", "version": "[DEV_VERSION]", "command": "bundle_deploy", "operating_system": "[OS]", diff --git a/acceptance/bundle/telemetry/deploy-error/output.txt b/acceptance/bundle/telemetry/deploy-error/output.txt index 5728b1d45d..0269af91b6 100644 --- a/acceptance/bundle/telemetry/deploy-error/output.txt +++ b/acceptance/bundle/telemetry/deploy-error/output.txt @@ -6,3 +6,6 @@ Error: unable to define default workspace root: bundle name not defined Exit code: 1 >>> cat out.requests.txt +No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] + +Exit code: 1 diff --git a/acceptance/bundle/telemetry/deploy/out.requests.txt b/acceptance/bundle/telemetry/deploy/out.requests.txt new file mode 100644 index 0000000000..c993ce6810 --- /dev/null +++ 
b/acceptance/bundle/telemetry/deploy/out.requests.txt @@ -0,0 +1,1269 @@ +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/preview/scim/v2/Me" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate", + "return_export_info": "true" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json", + "return_export_info": "true" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json", + "return_export_info": "true" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": 
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", + "q": { + "overwrite": "false" + }, + "body": { + "ID": "[UUID]", + "AcquisitionTime": "[TIMESTAMP]", + "IsForced": false, + "User": "[USERNAME]" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", + "return_export_info": "true" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/export", + "q": { + "direct_download": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace/delete", + "body": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal", + "recursive": true + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace/mkdirs", + "body": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none 
engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace/mkdirs", + "body": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt", + "q": { + "overwrite": "true" + }, + "raw_body": "\n\u003e\u003e\u003e [CLI] bundle deploy\nUploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...\n" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml", + "q": { + "overwrite": "true" + }, + "raw_body": "bundle:\n name: test-bundle\n uuid: [UUID]\n\nresources:\n jobs:\n job_one:\n name: job one\n job_two:\n name: job two\n job_three:\n name: job three\n\n pipelines:\n 
pipeline_one:\n name: pipeline one\n pipeline_two:\n name: pipeline two\n" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/script", + "q": { + "overwrite": "true" + }, + "raw_body": "errcode() {\n # Temporarily disable 'set -e' to prevent the script from exiting on error\n set +e\n # Execute the provided command with all arguments\n \"$@\"\n local exit_code=$?\n # Re-enable 'set -e' if it was previously set\n set -e\n if [ $exit_code -ne 0 ]; then\n \u003e\u00262 printf \"\\nExit code: $exit_code\\n\"\n fi\n}\n\nmusterr() {\n # Temporarily disable 'set -e' to prevent the script from exiting on error\n set +e\n # Execute the provided command with all arguments\n \"$@\"\n local exit_code=$?\n # Re-enable 'set -e'\n set -e\n if [ $exit_code -eq 0 ]; then\n \u003e\u00262 printf \"\\nUnexpected success\\n\"\n exit 1\n fi\n}\n\ntrace() {\n \u003e\u00262 printf \"\\n\u003e\u003e\u003e %s\\n\" \"$*\"\n\n if [[ \"$1\" == *\"=\"* ]]; then\n # If the first argument contains '=', collect all env vars\n local env_vars=()\n while [[ \"$1\" == *\"=\"* ]]; do\n env_vars+=(\"$1\")\n shift\n done\n # Export environment variables in a subshell and execute the command\n (\n export \"${env_vars[@]}\"\n \"$@\"\n )\n else\n # Execute the command normally\n \"$@\"\n fi\n\n return $?\n}\n\ngit-repo-init() {\n git init -qb main\n git config core.autocrlf false\n git config user.name \"Tester\"\n git config user.email \"[USERNAME]\"\n git config core.hooksPath no-hooks\n git add databricks.yml\n git commit -qm 'Add databricks.yml'\n}\n\ntitle() {\n local label=\"$1\"\n printf \"\\n=== %b\" \"$label\"\n}\n\nwithdir() {\n local dir=\"$1\"\n shift\n local orig_dir=\"$(pwd)\"\n cd \"$dir\" || return $?\n \"$@\"\n 
local exit_code=$?\n cd \"$orig_dir\" || return $?\n return $exit_code\n}\n\nuuid() {\n python3 -c 'import uuid; print(uuid.uuid4())'\n}\n\nvenv_activate() {\n if [[ \"$OSTYPE\" == \"msys\" || \"$OSTYPE\" == \"cygwin\" || \"$OSTYPE\" == \"win32\" ]]; then\n source .venv/Scripts/activate\n else\n source .venv/bin/activate\n fi\n}\n\nenvsubst() {\n # We need to disable MSYS_NO_PATHCONV when running the python script.\n # This is because the python interpreter is otherwise unable to find the python script\n # when MSYS_NO_PATHCONV is enabled.\n env -u MSYS_NO_PATHCONV envsubst.py\n}\n\nprint_telemetry_bool_values() {\n jq -r 'select(.path? == \"/telemetry-ext\") | (.body.protoLogs // [])[] | fromjson | ( (.entry // .) | (.databricks_cli_log.bundle_deploy_event.experimental.bool_values // []) ) | map(\"\\(.key) \\(.value)\") | .[]' out.requests.txt | sort\n}\n\nsethome() {\n local home=\"$1\"\n mkdir -p \"$home\"\n\n # For macOS and Linux, use HOME.\n export HOME=\"$home\"\n\n # For Windows, use USERPROFILE.\n export USERPROFILE=\"$home\"\n}\n\nas-test-sp() {\n if [[ -z \"$TEST_SP_TOKEN\" ]]; then\n echo \"Error: TEST_SP_TOKEN is not set.\" \u003e\u00262\n return 1\n fi\n\n DATABRICKS_TOKEN=\"$TEST_SP_TOKEN\" \\\n DATABRICKS_CLIENT_SECRET=\"\" \\\n DATABRICKS_CLIENT_ID=\"\" \\\n DATABRICKS_AUTH_TYPE=\"\" \\\n \"$@\"\n}\n\nreadplanarg() {\n # Expands into \"--plan \u003cfilename\u003e\" based on READPLAN env var\n # Use it with \"bundle deploy\" to configure two runs: once with saved plan and one without.\n # Note: READPLAN is specially handled in test runner so that engine=terraform/readplan is set combination is skipped.\n if [[ -n \"$READPLAN\" ]]; then\n printf -- \"--plan %s\" \"$1\"\n else\n printf \"\"\n fi\n}\n\n(\ntrace $CLI bundle deploy\n\ntrace cat out.requests.txt | jq 'select(has(\"path\") and .path == \"/telemetry-ext\") | .body.protoLogs[] | fromjson' \u003e telemetry.json\n\n# Assert that the telemetry mechanism is working and recording mutator 
execution times.\n# We only check that at least one mutator execution time is recorded to avoid flakiness on fast machines\n# where many mutators may complete in less than 1ms (the threshold for recording in bundle/mutator.go).\ntitle \"Assert that mutator execution times are being recorded\"\ntrace cat telemetry.json | jq ' .entry.databricks_cli_log.bundle_deploy_event.experimental.bundle_mutator_execution_time_ms | length \u003e 0'\n\n# bundle_mutator_execution_time_ms can have variable number of entries depending upon the runtime of the mutators. Thus we omit it from\n# being asserted here.\ncat telemetry.json | jq 'del(.entry.databricks_cli_log.bundle_deploy_event.experimental.bundle_mutator_execution_time_ms)' \u003e out.telemetry.txt\n\ncmd_exec_id=$(extract_command_exec_id.py)\ndeployment_id=$(cat .databricks/bundle/default/deployment.json | jq -r .id)\n\nupdate_file.py out.telemetry.txt $cmd_exec_id '[CMD-EXEC-ID]'\nupdate_file.py out.telemetry.txt \"[UUID]\" '[BUNDLE_UUID]'\nupdate_file.py out.telemetry.txt $deployment_id '[DEPLOYMENT_ID]'\n\nrm out.requests.txt\nrm telemetry.json\n)\n\nrm -fr .databricks .gitignore\n" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/repls.json", + "q": { + "overwrite": "true" + }, + "body": [ + { + "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/\\.terraformrc", + "New": "[DATABRICKS_TF_CLI_CONFIG_FILE]", + "Order": 0, + "Distinct": false + }, + { + "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/terraform", + "New": "[TERRAFORM]", + "Order": 0, + "Distinct": false + }, + { + "Old": 
"/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/databricks_bundles-0\\.295\\.0-py3-none-any\\.whl", + "New": "[DATABRICKS_BUNDLES_WHEEL]", + "Order": 0, + "Distinct": false + }, + { + "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/databricks", + "New": "[CLI]", + "Order": 0, + "Distinct": false + }, + { + "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/0\\.293\\.0/databricks", + "New": "[CLI_293]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[TEST_DEFAULT_WAREHOUSE_ID]", + "New": "[TEST_DEFAULT_WAREHOUSE_ID]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[TEST_DEFAULT_CLUSTER_ID]", + "New": "[TEST_DEFAULT_CLUSTER_ID]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[TEST_INSTANCE_POOL_ID]", + "New": "[TEST_INSTANCE_POOL_ID]", + "Order": 0, + "Distinct": false + }, + { + "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64", + "New": "[BUILD_DIR]", + "Order": 0, + "Distinct": false + }, + { + "Old": "0\\.0\\.0-dev(\\+[a-f0-9]{10,16})?", + "New": "[DEV_VERSION]", + "Order": 0, + "Distinct": false + }, + { + "Old": "databricks-sdk-go/[0-9]+\\.[0-9]+\\.[0-9]+", + "New": "databricks-sdk-go/[SDK_VERSION]", + "Order": 0, + "Distinct": false + }, + { + "Old": "1\\.25\\.7", + "New": "[GO_VERSION]", + "Order": 0, + "Distinct": false + }, + { + "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance", + "New": "[TESTROOT]", + "Order": 0, + "Distinct": false + }, + { + "Old": "dbapi[0-9a-f]+", + "New": "[DATABRICKS_TOKEN]", + "Order": 0, + "Distinct": false + }, + { + "Old": "i3\\.xlarge", + "New": "[NODE_TYPE_ID]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[UNIQUE_NAME]", + "New": "[UNIQUE_NAME]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[TEST_TMP_DIR]", + "New": 
"[TEST_TMP_DIR]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[TEST_TMP_DIR]", + "New": "[TEST_TMP_DIR]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[TEST_TMP_DIR]_PARENT", + "New": "[TEST_TMP_DIR]_PARENT", + "Order": 0, + "Distinct": false + }, + { + "Old": "[TEST_TMP_DIR]_PARENT", + "New": "[TEST_TMP_DIR]_PARENT", + "Order": 0, + "Distinct": false + }, + { + "Old": "[USERNAME]@databricks\\.com", + "New": "[USERNAME]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[USERNAME]", + "New": "[USERNAME]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[USERID]", + "New": "[USERID]", + "Order": 0, + "Distinct": false + }, + { + "Old": "https://127\\.0\\.0\\.1:51297", + "New": "[DATABRICKS_URL]", + "Order": 0, + "Distinct": false + }, + { + "Old": "http://127\\.0\\.0\\.1:51297", + "New": "[DATABRICKS_URL]", + "Order": 0, + "Distinct": false + }, + { + "Old": "127\\.0\\.0\\.1:51297", + "New": "[DATABRICKS_HOST]", + "Order": 0, + "Distinct": false + }, + { + "Old": "[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}", + "New": "[UUID]", + "Order": 0, + "Distinct": false + }, + { + "Old": "\\d{20,}", + "New": "[NUMID]", + "Order": 10, + "Distinct": false + }, + { + "Old": "1[78]\\d{17}", + "New": "[UNIX_TIME_NANOS]", + "Order": 10, + "Distinct": true + }, + { + "Old": "\\d{17,}", + "New": "[NUMID]", + "Order": 10, + "Distinct": false + }, + { + "Old": "\\d{14,}", + "New": "[NUMID]", + "Order": 10, + "Distinct": false + }, + { + "Old": "1[78]\\d{11}", + "New": "[UNIX_TIME_MILLIS]", + "Order": 10, + "Distinct": true + }, + { + "Old": "\\d{11,}", + "New": "[NUMID]", + "Order": 10, + "Distinct": false + }, + { + "Old": "1[78]\\d{8}", + "New": "[UNIX_TIME_S]", + "Order": 10, + "Distinct": false + }, + { + "Old": "\\d{8,}", + "New": "[NUMID]", + "Order": 10, + "Distinct": false + }, + { + "Old": "2\\d\\d\\d-\\d\\d-\\d\\d(T| )\\d\\d:\\d\\d:\\d\\d\\.\\d+(Z|\\+\\d\\d:\\d\\d)?", + "New": "[TIMESTAMP]", + "Order": 9, + 
"Distinct": false + }, + { + "Old": "2\\d\\d\\d-\\d\\d-\\d\\d(T| )\\d\\d:\\d\\d:\\d\\dZ?", + "New": "[TIMESTAMP]", + "Order": 9, + "Distinct": false + }, + { + "Old": "os/[OS]", + "New": "os/[OS]", + "Order": 0, + "Distinct": false + }, + { + "Old": "os/[OS]", + "New": "os/[OS]", + "Order": 0, + "Distinct": false + }, + { + "Old": "os/[OS]", + "New": "os/[OS]", + "Order": 0, + "Distinct": false + }, + { + "Old": "", + "New": "", + "Order": 0, + "Distinct": false + }, + { + "Old": "\"execution_time_ms\": \\d{1,5},", + "New": "\"execution_time_ms\": SMALL_INT,", + "Order": 0, + "Distinct": false + }, + { + "Old": "([OS]|[OS]|[OS])", + "New": "[OS]", + "Order": 0, + "Distinct": false + }, + { + "Old": "\"local_cache_measurements_ms\": \\[[^\\]]*\\]", + "New": "\"local_cache_measurements_ms\": [...redacted...]", + "Order": 0, + "Distinct": false + } + ] +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/test.toml", + "q": { + "overwrite": "true" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt", + "q": { + "overwrite": "true" + }, + "raw_body": "{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/.well-known/databricks-config\"\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy 
cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/preview/scim/v2/Me\"\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\",\n \"q\": {\n \"overwrite\": \"false\"\n },\n \"body\": {\n \"ID\": \"[UUID]\",\n \"AcquisitionTime\": \"[TIMESTAMP]\",\n \"IsForced\": false,\n \"User\": \"[USERNAME]\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] 
databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/export\",\n \"q\": {\n \"direct_download\": \"true\",\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace/delete\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal\",\n \"recursive\": true\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace/mkdirs\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n 
\"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace/mkdirs\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json", + "q": { + "overwrite": "true" + }, + "body": { + "version": 1, + "seq": 1, + "cli_version": "[DEV_VERSION]", + "timestamp": "[TIMESTAMP]", + "files": [ + { + "local_path": "test.toml", + "is_notebook": false + }, + { + "local_path": "databricks.yml", + "is_notebook": false + }, + { + "local_path": "out.requests.txt", + "is_notebook": false + }, + { + "local_path": "output.txt", + "is_notebook": false + }, + { + "local_path": "repls.json", + "is_notebook": false + }, + { + "local_path": "script", + "is_notebook": false + } + ], + "id": "[UUID]" + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/pipelines", + "body": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": 
"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "pipeline two" + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/pipelines/[UUID]" +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/pipelines", + "body": { + "channel": "CURRENT", + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edition": "ADVANCED", + "name": "pipeline one" + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/pipelines/[UUID]" +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/pipelines/[UUID]" +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/pipelines/[UUID]" +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.2/jobs/create", + "body": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": 
"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "job three", + "queue": { + "enabled": true + } + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.2/jobs/get", + "q": { + "job_id": "[NUMID]" + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.2/jobs/create", + "body": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "job two", + "queue": { + "enabled": true + } + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.2/jobs/create", + "body": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "job one", + "queue": { + "enabled": true + } + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.2/jobs/get", + "q": { + "job_id": "[NUMID]" + } +} +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 
databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.2/jobs/get", + "q": { + "job_id": "[NUMID]" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate", + "q": { + "overwrite": "true" + }, + "body": { + "version": 4, + "terraform_version": "1.5.5", + "serial": 6, + "lineage": "[UUID]", + "outputs": {}, + "resources": [ + { + "mode": "managed", + "type": "databricks_job", + "name": "job_one", + "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", + "instances": [ + { + "schema_version": 2, + "attributes": { + "always_running": false, + "budget_policy_id": null, + "continuous": [], + "control_run_state": false, + "dbt_task": [], + "deployment": [ + { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + } + ], + "description": null, + "edit_mode": "UI_LOCKED", + "email_notifications": [ + { + "no_alert_for_skipped_runs": false, + "on_duration_warning_threshold_exceeded": [], + "on_failure": [], + "on_start": [], + "on_streaming_backlog_exceeded": [], + "on_success": [] + } + ], + "environment": [], + "existing_cluster_id": null, + "format": "MULTI_TASK", + "git_source": [], + "health": [], + "id": "[NUMID]", + "job_cluster": [], + "library": [], + "max_concurrent_runs": 1, + "max_retries": 0, + "min_retry_interval_millis": 0, + "name": "job one", + "new_cluster": [], + "notebook_task": [], + "notification_settings": [], + "parameter": [], + "performance_target": null, + "pipeline_task": [], + "provider_config": [], + "python_wheel_task": [], + "queue": [ + { + "enabled": 
true + } + ], + "retry_on_timeout": false, + "run_as": [ + { + "group_name": "", + "service_principal_name": "", + "user_name": "[USERNAME]" + } + ], + "run_job_task": [], + "schedule": [], + "spark_jar_task": [], + "spark_python_task": [], + "spark_submit_task": [], + "tags": null, + "task": [], + "timeout_seconds": 0, + "timeouts": null, + "trigger": [], + "url": "[DATABRICKS_URL]/#job/[NUMID]", + "usage_policy_id": null, + "webhook_notifications": [ + { + "on_duration_warning_threshold_exceeded": [], + "on_failure": [], + "on_start": [], + "on_streaming_backlog_exceeded": [], + "on_success": [] + } + ] + }, + "sensitive_attributes": [], + "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxODAwMDAwMDAwMDAwLCJ1cGRhdGUiOjE4MDAwMDAwMDAwMDB9LCJzY2hlbWFfdmVyc2lvbiI6IjIifQ==" + } + ] + }, + { + "mode": "managed", + "type": "databricks_job", + "name": "job_three", + "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", + "instances": [ + { + "schema_version": 2, + "attributes": { + "always_running": false, + "budget_policy_id": null, + "continuous": [], + "control_run_state": false, + "dbt_task": [], + "deployment": [ + { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + } + ], + "description": null, + "edit_mode": "UI_LOCKED", + "email_notifications": [ + { + "no_alert_for_skipped_runs": false, + "on_duration_warning_threshold_exceeded": [], + "on_failure": [], + "on_start": [], + "on_streaming_backlog_exceeded": [], + "on_success": [] + } + ], + "environment": [], + "existing_cluster_id": null, + "format": "MULTI_TASK", + "git_source": [], + "health": [], + "id": "[NUMID]", + "job_cluster": [], + "library": [], + "max_concurrent_runs": 1, + "max_retries": 0, + "min_retry_interval_millis": 0, + "name": "job three", + "new_cluster": [], + "notebook_task": [], + "notification_settings": [], + "parameter": [], + "performance_target": null, + 
"pipeline_task": [], + "provider_config": [], + "python_wheel_task": [], + "queue": [ + { + "enabled": true + } + ], + "retry_on_timeout": false, + "run_as": [ + { + "group_name": "", + "service_principal_name": "", + "user_name": "[USERNAME]" + } + ], + "run_job_task": [], + "schedule": [], + "spark_jar_task": [], + "spark_python_task": [], + "spark_submit_task": [], + "tags": null, + "task": [], + "timeout_seconds": 0, + "timeouts": null, + "trigger": [], + "url": "[DATABRICKS_URL]/#job/[NUMID]", + "usage_policy_id": null, + "webhook_notifications": [ + { + "on_duration_warning_threshold_exceeded": [], + "on_failure": [], + "on_start": [], + "on_streaming_backlog_exceeded": [], + "on_success": [] + } + ] + }, + "sensitive_attributes": [], + "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxODAwMDAwMDAwMDAwLCJ1cGRhdGUiOjE4MDAwMDAwMDAwMDB9LCJzY2hlbWFfdmVyc2lvbiI6IjIifQ==" + } + ] + }, + { + "mode": "managed", + "type": "databricks_job", + "name": "job_two", + "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", + "instances": [ + { + "schema_version": 2, + "attributes": { + "always_running": false, + "budget_policy_id": null, + "continuous": [], + "control_run_state": false, + "dbt_task": [], + "deployment": [ + { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + } + ], + "description": null, + "edit_mode": "UI_LOCKED", + "email_notifications": [ + { + "no_alert_for_skipped_runs": false, + "on_duration_warning_threshold_exceeded": [], + "on_failure": [], + "on_start": [], + "on_streaming_backlog_exceeded": [], + "on_success": [] + } + ], + "environment": [], + "existing_cluster_id": null, + "format": "MULTI_TASK", + "git_source": [], + "health": [], + "id": "[NUMID]", + "job_cluster": [], + "library": [], + "max_concurrent_runs": 1, + "max_retries": 0, + "min_retry_interval_millis": 0, + "name": "job two", + "new_cluster": [], + 
"notebook_task": [], + "notification_settings": [], + "parameter": [], + "performance_target": null, + "pipeline_task": [], + "provider_config": [], + "python_wheel_task": [], + "queue": [ + { + "enabled": true + } + ], + "retry_on_timeout": false, + "run_as": [ + { + "group_name": "", + "service_principal_name": "", + "user_name": "[USERNAME]" + } + ], + "run_job_task": [], + "schedule": [], + "spark_jar_task": [], + "spark_python_task": [], + "spark_submit_task": [], + "tags": null, + "task": [], + "timeout_seconds": 0, + "timeouts": null, + "trigger": [], + "url": "[DATABRICKS_URL]/#job/[NUMID]", + "usage_policy_id": null, + "webhook_notifications": [ + { + "on_duration_warning_threshold_exceeded": [], + "on_failure": [], + "on_start": [], + "on_streaming_backlog_exceeded": [], + "on_success": [] + } + ] + }, + "sensitive_attributes": [], + "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxODAwMDAwMDAwMDAwLCJ1cGRhdGUiOjE4MDAwMDAwMDAwMDB9LCJzY2hlbWFfdmVyc2lvbiI6IjIifQ==" + } + ] + }, + { + "mode": "managed", + "type": "databricks_pipeline", + "name": "pipeline_one", + "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", + "instances": [ + { + "schema_version": 0, + "attributes": { + "allow_duplicate_names": false, + "budget_policy_id": null, + "catalog": null, + "cause": null, + "channel": "CURRENT", + "cluster": [], + "cluster_id": null, + "configuration": null, + "continuous": false, + "creator_user_name": "[USERNAME]", + "deployment": [ + { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + } + ], + "development": false, + "edition": "ADVANCED", + "environment": [], + "event_log": [], + "expected_last_modified": 0, + "filters": [], + "gateway_definition": [], + "health": null, + "id": "[UUID]", + "ingestion_definition": [], + "last_modified": [UNIX_TIME_MILLIS][0], + "latest_updates": null, + "library": [], + "name": "pipeline one", + 
"notification": [], + "photon": false, + "provider_config": [], + "restart_window": [], + "root_path": null, + "run_as": [], + "run_as_user_name": "[USERNAME]", + "schema": null, + "serverless": false, + "state": "IDLE", + "storage": "dbfs:/pipelines/[UUID]", + "tags": null, + "target": null, + "timeouts": null, + "trigger": [], + "url": "[DATABRICKS_URL]/#joblist/pipelines/[UUID]", + "usage_policy_id": null + }, + "sensitive_attributes": [], + "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxMjAwMDAwMDAwMDAwLCJkZWZhdWx0IjoxMjAwMDAwMDAwMDAwLCJkZWxldGUiOjEyMDAwMDAwMDAwMDAsInJlYWQiOjEyMDAwMDAwMDAwMDAsInVwZGF0ZSI6MTIwMDAwMDAwMDAwMH19" + } + ] + }, + { + "mode": "managed", + "type": "databricks_pipeline", + "name": "pipeline_two", + "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", + "instances": [ + { + "schema_version": 0, + "attributes": { + "allow_duplicate_names": false, + "budget_policy_id": null, + "catalog": null, + "cause": null, + "channel": "CURRENT", + "cluster": [], + "cluster_id": null, + "configuration": null, + "continuous": false, + "creator_user_name": "[USERNAME]", + "deployment": [ + { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" + } + ], + "development": false, + "edition": "ADVANCED", + "environment": [], + "event_log": [], + "expected_last_modified": 0, + "filters": [], + "gateway_definition": [], + "health": null, + "id": "[UUID]", + "ingestion_definition": [], + "last_modified": [UNIX_TIME_MILLIS][1], + "latest_updates": null, + "library": [], + "name": "pipeline two", + "notification": [], + "photon": false, + "provider_config": [], + "restart_window": [], + "root_path": null, + "run_as": [], + "run_as_user_name": "[USERNAME]", + "schema": null, + "serverless": false, + "state": "IDLE", + "storage": "dbfs:/pipelines/[UUID]", + "tags": null, + "target": null, + "timeouts": null, + "trigger": [], + "url": 
"[DATABRICKS_URL]/#joblist/pipelines/[UUID]", + "usage_policy_id": null + }, + "sensitive_attributes": [], + "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxMjAwMDAwMDAwMDAwLCJkZWZhdWx0IjoxMjAwMDAwMDAwMDAwLCJkZWxldGUiOjEyMDAwMDAwMDAwMDAsInJlYWQiOjEyMDAwMDAwMDAwMDAsInVwZGF0ZSI6MTIwMDAwMDAwMDAwMH19" + } + ] + } + ], + "check_results": null + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json", + "q": { + "overwrite": "true" + }, + "body": { + "version": 1, + "config": { + "bundle": { + "name": "test-bundle", + "target": "default", + "git": { + "bundle_root_path": "." + } + }, + "workspace": { + "file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" + }, + "resources": { + "jobs": { + "job_one": { + "id": "[NUMID]", + "relative_path": "databricks.yml" + }, + "job_three": { + "id": "[NUMID]", + "relative_path": "databricks.yml" + }, + "job_two": { + "id": "[NUMID]", + "relative_path": "databricks.yml" + } + }, + "pipelines": { + "pipeline_one": { + "id": "[UUID]", + "relative_path": "databricks.yml" + }, + "pipeline_two": { + "id": "[UUID]", + "relative_path": "databricks.yml" + } + } + }, + "presets": { + "source_linked_deployment": false + } + }, + "extra": {} + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", + "return_export_info": "true" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] 
databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/get-status", + "q": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", + "return_export_info": "true" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/workspace/export", + "q": { + "direct_download": "true", + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace/delete", + "body": { + "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" + } +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/telemetry-ext", + "body": { + "uploadTime": [UNIX_TIME_MILLIS][2], + "items": [], + "protoLogs": [ + 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":1588,\"exit_code\":0},\"bundle_deploy_event\":{\"bundle_uuid\":\"[UUID]\",\"deployment_id\":\"[UUID]\",\"resource_count\":5,\"resource_job_count\":3,\"resource_pipeline_count\":2,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0,\"resource_job_ids\":[\"[NUMID]\",\"[NUMID]\",\"[NUMID]\"],\"resource_pipeline_ids\":[\"[UUID]\",\"[UUID]\"],\"experimental\":{\"configuration_file_count\":1,\"variable_count\":0,\"complex_variable_count\":0,\"lookup_variable_count\":0,\"target_count\":1,\"bool_values\":[{\"key\":\"local.cache.attempt\",\"value\":true},{\"key\":\"local.cache.miss\",\"value\":true},{\"key\":\"experimental.use_legacy_run_as\",\"value\":false},{\"key\":\"run_as_set\",\"value\":false},{\"key\":\"presets_name_prefix_is_set\",\"value\":false},{\"key\":\"python_wheel_wrapper_is_set\",\"value\":false},{\"key\":\"skip_artifact_cleanup\",\"value\":false},{\"key\":\"has_serverless_compute\",\"value\":false},{\"key\":\"has_classic_job_compute\",\"value\":false},{\"key\":\"has_classic_interactive_compute\",\"value\":false}],\"bundle_mode\":\"TYPE_UNSPECIFIED\",\"workspace_artifact_path_type\":\"WORKSPACE_FILE_SYSTEM\",\"bundle_mutator_execution_time_ms\":[{\"key\":\"terraform.(plan)\",\"value\":825},{\"key\":\"terraform.(apply)\",\"value\":450},{\"key\":\"phases.Initialize\",\"value\":22},{\"key\":\"files.(upload)\",\"value\":12},{\"key\":\"phases.Build\",\"value\":8},{\"key\":\"artifacts.(build)\",\"value\":7},{\"key\":\"resourcemutator.(processStaticResources)\",\"value\":7
},{\"key\":\"deploy.(statePull)\",\"value\":5},{\"key\":\"mutator.(populateCurrentUser)\",\"value\":5},{\"key\":\"lock.(acquire)\",\"value\":4},{\"key\":\"mutator.(initializeCache)\",\"value\":3},{\"key\":\"terraform.(write)\",\"value\":2},{\"key\":\"artifacts.(cleanUp)\",\"value\":2},{\"key\":\"deploy.(statePush)\",\"value\":1},{\"key\":\"libraries.(upload)\",\"value\":1},{\"key\":\"metadata.(upload)\",\"value\":1},{\"key\":\"validate.FastValidate\",\"value\":0}],\"local_cache_measurements_ms\":[{\"key\":\"local.cache.compute_duration\",\"value\":0}]}}}}}" + ] + } +} diff --git a/acceptance/bundle/telemetry/deploy/out.telemetry.txt b/acceptance/bundle/telemetry/deploy/out.telemetry.txt index f945233dd1..d13c1c4ddc 100644 --- a/acceptance/bundle/telemetry/deploy/out.telemetry.txt +++ b/acceptance/bundle/telemetry/deploy/out.telemetry.txt @@ -3,7 +3,7 @@ "entry": { "databricks_cli_log": { "execution_context": { - "cmd_exec_id": "[CMD-EXEC-ID]", + "cmd_exec_id": "[UUID]", "version": "[DEV_VERSION]", "command": "bundle_deploy", "operating_system": "[OS]", @@ -11,8 +11,8 @@ "exit_code": 0 }, "bundle_deploy_event": { - "bundle_uuid": "[BUNDLE_UUID]", - "deployment_id": "[DEPLOYMENT_ID]", + "bundle_uuid": "[UUID]", + "deployment_id": "[UUID]", "resource_count": 5, "resource_job_count": 3, "resource_pipeline_count": 2, diff --git a/acceptance/bundle/telemetry/deploy/output.txt b/acceptance/bundle/telemetry/deploy/output.txt index cf5dd1e434..b20bfb1b39 100644 --- a/acceptance/bundle/telemetry/deploy/output.txt +++ b/acceptance/bundle/telemetry/deploy/output.txt @@ -10,3 +10,6 @@ Deployment complete! 
=== Assert that mutator execution times are being recorded >>> cat telemetry.json true +No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] + +Exit code: 1 diff --git a/acceptance/bundle/templates/pydabs/check-formatting/output.txt b/acceptance/bundle/templates/pydabs/check-formatting/output.txt index 29db0fde3c..09fedb708e 100644 --- a/acceptance/bundle/templates/pydabs/check-formatting/output.txt +++ b/acceptance/bundle/templates/pydabs/check-formatting/output.txt @@ -14,8 +14,6 @@ To get started, refer to the project README.md file and the documentation at htt Checking code formatting with ruff format --line-length 88 >>> ruff format --isolated --line-length 88 --quiet --diff --check my_pydabs -Checking code formatting with ruff format --line-length 120 +script: line 45: ruff: command not found ->>> ruff format --isolated --line-length 120 --quiet --diff --check my_pydabs - ->>> yamlcheck.py +Exit code: 127 diff --git a/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml b/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml index 6ca7916ed1..f5c8663446 100644 --- a/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml @@ -1,2 +1,2 @@ bundle: - uuid: [BUNDLE-UUID] + uuid: [UUID] diff --git a/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt b/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt index 8c32340693..8ae8e61447 100644 --- a/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt @@ -1,7 +1,16 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] 
databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "POST", @@ -10,7 +19,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"custom\"}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"custom\"}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/custom-template/output.txt b/acceptance/bundle/templates/telemetry/custom-template/output.txt index 6c81fc77dc..a272f811e3 100644 --- a/acceptance/bundle/templates/telemetry/custom-template/output.txt +++ b/acceptance/bundle/templates/telemetry/custom-template/output.txt @@ -1,24 +1,6 @@ >>> [CLI] bundle init . 
--config-file input.json --output-dir output ✨ Successfully initialized template +No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] ->>> cat out.requests.txt -{ - "frontend_log_event_id": "[UUID]", - "entry": { - "databricks_cli_log": { - "execution_context": { - "cmd_exec_id": "[CMD-EXEC-ID]", - "version": "[DEV_VERSION]", - "command": "bundle_init", - "operating_system": "[OS]", - "execution_time_ms": SMALL_INT, - "exit_code": 0 - }, - "bundle_init_event": { - "bundle_uuid": "[BUNDLE-UUID]", - "template_name": "custom" - } - } - } -} +Exit code: 1 diff --git a/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml b/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml index d4e7a980b0..5f2a8cd93b 100644 --- a/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml @@ -3,7 +3,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_dbt_sql - uuid: [BUNDLE-UUID] + uuid: [UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt b/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt index 69dd23fe05..a89e6dae56 100644 --- a/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt @@ -1,7 +1,16 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "GET", @@ -10,7 +19,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "GET", @@ -19,7 +28,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "POST", @@ -28,7 +37,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"dbt-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"yes\"}]}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"dbt-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"yes\"}]}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/dbt-sql/output.txt b/acceptance/bundle/templates/telemetry/dbt-sql/output.txt index 092d7557f2..a9fe878d73 100644 --- a/acceptance/bundle/templates/telemetry/dbt-sql/output.txt +++ b/acceptance/bundle/templates/telemetry/dbt-sql/output.txt @@ -8,34 +8,6 @@ workspace_host: [DATABRICKS_URL] If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started. Refer to the README.md file for full "getting started" guide and production setup instructions. 
+No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] ->>> cat out.requests.txt -{ - "frontend_log_event_id": "[UUID]", - "entry": { - "databricks_cli_log": { - "execution_context": { - "cmd_exec_id": "[CMD-EXEC-ID]", - "version": "[DEV_VERSION]", - "command": "bundle_init", - "operating_system": "[OS]", - "execution_time_ms": SMALL_INT, - "exit_code": 0 - }, - "bundle_init_event": { - "bundle_uuid": "[BUNDLE-UUID]", - "template_name": "dbt-sql", - "template_enum_args": [ - { - "key": "personal_schemas", - "value": "yes" - }, - { - "key": "serverless", - "value": "yes" - } - ] - } - } - } -} +Exit code: 1 diff --git a/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml b/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml index 2e00b00b9e..fe79065310 100644 --- a/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml @@ -2,7 +2,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_default_python - uuid: [BUNDLE-UUID] + uuid: [UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/telemetry/default-python/out.requests.txt b/acceptance/bundle/templates/telemetry/default-python/out.requests.txt index 2348cf5030..9a32f8d21f 100644 --- a/acceptance/bundle/templates/telemetry/default-python/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/default-python/out.requests.txt @@ -1,7 +1,16 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "GET", @@ -10,7 +19,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "GET", @@ -19,7 +28,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "POST", @@ -28,7 +37,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"default-python\",\"template_enum_args\":[{\"key\":\"include_job\",\"value\":\"yes\"},{\"key\":\"include_pipeline\",\"value\":\"yes\"},{\"key\":\"include_python\",\"value\":\"yes\"},{\"key\":\"language\",\"value\":\"python\"},{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"no\"}]}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"default-python\",\"template_enum_args\":[{\"key\":\"include_job\",\"value\":\"yes\"},{\"key\":\"include_pipeline\",\"value\":\"yes\"},{\"key\":\"include_python\",\"value\":\"yes\"},{\"key\":\"language\",\"value\":\"python\"},{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"no\"}]}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/default-python/output.txt b/acceptance/bundle/templates/telemetry/default-python/output.txt index c1f9e4f082..bafdebbc76 100644 --- a/acceptance/bundle/templates/telemetry/default-python/output.txt +++ b/acceptance/bundle/templates/telemetry/default-python/output.txt @@ -9,50 +9,6 @@ Note that [DATABRICKS_URL] is used for initialization. ✨ Your new project has been created in the 'my_default_python' directory! To get started, refer to the project README.md file and the documentation at https://docs.databricks.com/dev-tools/bundles/index.html. 
+No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] ->>> cat out.requests.txt -{ - "frontend_log_event_id": "[UUID]", - "entry": { - "databricks_cli_log": { - "execution_context": { - "cmd_exec_id": "[CMD-EXEC-ID]", - "version": "[DEV_VERSION]", - "command": "bundle_init", - "operating_system": "[OS]", - "execution_time_ms": SMALL_INT, - "exit_code": 0 - }, - "bundle_init_event": { - "bundle_uuid": "[BUNDLE-UUID]", - "template_name": "default-python", - "template_enum_args": [ - { - "key": "include_job", - "value": "yes" - }, - { - "key": "include_pipeline", - "value": "yes" - }, - { - "key": "include_python", - "value": "yes" - }, - { - "key": "language", - "value": "python" - }, - { - "key": "personal_schemas", - "value": "yes" - }, - { - "key": "serverless", - "value": "no" - } - ] - } - } - } -} +Exit code: 1 diff --git a/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml b/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml index 49704391ea..07562ce7ad 100644 --- a/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml @@ -2,7 +2,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_default_sql - uuid: [BUNDLE-UUID] + uuid: [UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt b/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt index 4c107532db..ee8e310195 100644 --- a/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt @@ -1,7 +1,16 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "GET", @@ -10,7 +19,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" ] }, "method": "POST", @@ -19,7 +28,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"default-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"}]}}}}" + 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"default-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"}]}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/default-sql/output.txt b/acceptance/bundle/templates/telemetry/default-sql/output.txt index 907e24fc87..b55e38d4b7 100644 --- a/acceptance/bundle/templates/telemetry/default-sql/output.txt +++ b/acceptance/bundle/templates/telemetry/default-sql/output.txt @@ -8,30 +8,6 @@ workspace_host: [DATABRICKS_URL] Please refer to the README.md file for "getting started" instructions. See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html. +No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] ->>> cat out.requests.txt -{ - "frontend_log_event_id": "[UUID]", - "entry": { - "databricks_cli_log": { - "execution_context": { - "cmd_exec_id": "[CMD-EXEC-ID]", - "version": "[DEV_VERSION]", - "command": "bundle_init", - "operating_system": "[OS]", - "execution_time_ms": SMALL_INT, - "exit_code": 0 - }, - "bundle_init_event": { - "bundle_uuid": "[BUNDLE-UUID]", - "template_name": "default-sql", - "template_enum_args": [ - { - "key": "personal_schemas", - "value": "yes" - } - ] - } - } - } -} +Exit code: 1 diff --git a/acceptance/bundle/user_agent/output.txt b/acceptance/bundle/user_agent/output.txt index 96d1b988e5..cbe9899beb 100644 --- a/acceptance/bundle/user_agent/output.txt +++ b/acceptance/bundle/user_agent/output.txt @@ -17,6 +17,7 @@ OK deploy.direct /api/2.0/workspace/delete engine/direct OK deploy.direct /api/2.0/workspace/delete engine/direct OK deploy.direct /api/2.0/workspace/mkdirs engine/direct OK 
deploy.direct /api/2.1/unity-catalog/schemas engine/direct +MISS deploy.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS deploy.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat' MISS deploy.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat' MISS deploy.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat' @@ -49,6 +50,7 @@ OK destroy.direct /api/2.0/workspace/get-status engine/direct OK destroy.direct /api/2.1/unity-catalog/schemas/mycatalog.myschema engine/direct OK destroy.direct /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock engine/direct OK destroy.direct /api/2.0/workspace/delete engine/direct +MISS destroy.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS destroy.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_destroy cmd-exec-id/[UUID] interactive/none auth/pat' MISS destroy.terraform /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_destroy cmd-exec-id/[UUID] interactive/none auth/pat' MISS destroy.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_destroy cmd-exec-id/[UUID] interactive/none auth/pat' @@ -65,6 +67,7 @@ MISS plan.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-g 
MISS plan.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' OK plan.direct /api/2.0/workspace/get-status engine/direct +MISS plan.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS plan.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' @@ -76,6 +79,7 @@ MISS plan2.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sd OK plan2.direct /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json engine/direct OK plan2.direct /api/2.0/workspace/get-status engine/direct OK plan2.direct /api/2.1/unity-catalog/schemas/mycatalog.myschema engine/direct +MISS plan2.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS plan2.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan2.terraform /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] 
cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan2.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' @@ -87,6 +91,7 @@ MISS run.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go MISS run.direct /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' MISS run.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' MISS run.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' +MISS run.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS run.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' MISS run.terraform /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' MISS run.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' @@ -95,6 +100,7 @@ MISS summary.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sd MISS summary.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none 
auth/pat' MISS summary.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' OK summary.direct /api/2.0/preview/scim/v2/Me engine/direct +MISS summary.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS summary.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' MISS summary.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' MISS summary.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' @@ -103,6 +109,7 @@ MISS validate.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-s MISS validate.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.direct /api/2.0/workspace/mkdirs 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' +MISS validate.direct /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS validate.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.terraform 
/api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' diff --git a/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json b/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json index d4543d353a..cc39aad6e9 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.deploy.direct.json @@ -287,3 +287,12 @@ "name": "myschema" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json index 6ca92dfd65..4b2699450a 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.deploy.terraform.json @@ -296,6 +296,15 @@ "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "User-Agent": [ diff --git a/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json b/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json index b331a46f1b..24f3c5bf13 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.destroy.direct.json @@ -145,3 +145,12 @@ "recursive": true } } +{ + 
"headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json index cbb0b741e2..152263ba11 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.destroy.terraform.json @@ -124,6 +124,15 @@ "recursive": true } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "User-Agent": [ diff --git a/acceptance/bundle/user_agent/simple/out.requests.plan.direct.json b/acceptance/bundle/user_agent/simple/out.requests.plan.direct.json index 929be084b0..614f55e04a 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.plan.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.plan.direct.json @@ -46,3 +46,12 @@ "return_export_info": "true" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.plan.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.plan.terraform.json index 385307fb25..dcc358b33c 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.plan.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.plan.terraform.json @@ -46,3 +46,12 @@ "return_export_info": "true" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git 
a/acceptance/bundle/user_agent/simple/out.requests.plan2.direct.json b/acceptance/bundle/user_agent/simple/out.requests.plan2.direct.json index a953c48b26..42543acc23 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.plan2.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.plan2.direct.json @@ -73,3 +73,12 @@ "method": "GET", "path": "/api/2.1/unity-catalog/schemas/mycatalog.myschema" } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.plan2.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.plan2.terraform.json index 4a86563981..e1ecacb19b 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.plan2.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.plan2.terraform.json @@ -64,6 +64,15 @@ "return_export_info": "true" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "User-Agent": [ diff --git a/acceptance/bundle/user_agent/simple/out.requests.run.direct.json b/acceptance/bundle/user_agent/simple/out.requests.run.direct.json index 1ab977969a..86f7e607a2 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.run.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.run.direct.json @@ -42,3 +42,12 @@ "return_export_info": "true" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.run.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.run.terraform.json index 80512f04ff..62c5a994a1 100644 --- 
a/acceptance/bundle/user_agent/simple/out.requests.run.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.run.terraform.json @@ -42,3 +42,12 @@ "return_export_info": "true" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.summary.direct.json b/acceptance/bundle/user_agent/simple/out.requests.summary.direct.json index 1827745390..c3017391f2 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.summary.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.summary.direct.json @@ -42,3 +42,12 @@ "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.summary.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.summary.terraform.json index f6427d71ab..bf160a9744 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.summary.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.summary.terraform.json @@ -42,3 +42,12 @@ "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.validate.direct.json b/acceptance/bundle/user_agent/simple/out.requests.validate.direct.json index 71f44bb19a..8c64d6f74a 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.validate.direct.json +++ b/acceptance/bundle/user_agent/simple/out.requests.validate.direct.json @@ -43,3 +43,12 @@ "path": 
"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/user_agent/simple/out.requests.validate.terraform.json b/acceptance/bundle/user_agent/simple/out.requests.validate.terraform.json index 71f44bb19a..8c64d6f74a 100644 --- a/acceptance/bundle/user_agent/simple/out.requests.validate.terraform.json +++ b/acceptance/bundle/user_agent/simple/out.requests.validate.terraform.json @@ -43,3 +43,12 @@ "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" } } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/bundle/variables/host/output.txt b/acceptance/bundle/variables/host/output.txt index 9fa2ab32cb..21bd93961b 100644 --- a/acceptance/bundle/variables/host/output.txt +++ b/acceptance/bundle/variables/host/output.txt @@ -1,5 +1,6 @@ >>> errcode [CLI] bundle validate -o json +Warn: Failed to fix host for metadata resolution: parse "https://${var.host}": invalid character "{" in host name Warning: Variable interpolation is not supported for fields that configure authentication at workspace.host in databricks.yml:10:9 @@ -33,6 +34,7 @@ Error: failed during request visitor: parse "https://${var.host}": invalid chara Exit code: 1 >>> errcode [CLI] bundle validate +Warn: Failed to fix host for metadata resolution: parse "https://${var.host}": invalid character "{" in host name Warning: Variable interpolation is not supported for fields that configure authentication at workspace.host in databricks.yml:10:9 @@ -52,6 +54,7 @@ Found 1 error and 1 warning Exit code: 1 >>> errcode [CLI] bundle summary +Warn: Failed to fix host for metadata resolution: parse 
"https://${var.host}": invalid character "{" in host name Warning: Variable interpolation is not supported for fields that configure authentication at workspace.host in databricks.yml:10:9 diff --git a/acceptance/cmd/auth/describe/default-profile/output.txt b/acceptance/cmd/auth/describe/default-profile/output.txt index 75e00fba9e..315e156e8e 100644 --- a/acceptance/cmd/auth/describe/default-profile/output.txt +++ b/acceptance/cmd/auth/describe/default-profile/output.txt @@ -8,8 +8,11 @@ Authenticated with: pat ----- Current configuration: ✓ host: [DATABRICKS_URL] (from DATABRICKS_HOST environment variable) + ✓ workspace_id: [NUMID] ✓ token: ******** (from DATABRICKS_TOKEN environment variable) ✓ profile: my-workspace ✓ databricks_cli_path: [CLI] ✓ auth_type: pat ✓ rate_limit: [NUMID] (from DATABRICKS_RATE_LIMIT environment variable) + ✓ cloud: AWS + ✓ discovery_url: [DATABRICKS_URL]/oidc/.well-known/oauth-authorization-server diff --git a/acceptance/cmd/auth/profiles/output.txt b/acceptance/cmd/auth/profiles/output.txt index a04bb2dfaa..060da0eba5 100644 --- a/acceptance/cmd/auth/profiles/output.txt +++ b/acceptance/cmd/auth/profiles/output.txt @@ -1,5 +1,6 @@ === Profiles with workspace_id (JSON output) +Warn: Failed to resolve host metadata: fetching host metadata from "https://test.cloud.databricks.com/.well-known/databricks-config": Get "https://test.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup test.cloud.databricks.com: no such host. Falling back to user config. 
{ "profiles": [ { diff --git a/acceptance/cmd/auth/switch/nominal/output.txt b/acceptance/cmd/auth/switch/nominal/output.txt index a08508c440..7a39d54ad3 100644 --- a/acceptance/cmd/auth/switch/nominal/output.txt +++ b/acceptance/cmd/auth/switch/nominal/output.txt @@ -11,6 +11,8 @@ default_profile = profile-a === Profiles after first switch >>> [CLI] auth profiles --skip-validate +Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-a.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-a.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-a.cloud.databricks.com: no such host. Falling back to user config. +Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-b.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-b.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-b.cloud.databricks.com: no such host. Falling back to user config. Name Host Valid profile-a (Default) https://profile-a.cloud.databricks.com NO profile-b https://profile-b.cloud.databricks.com NO @@ -27,6 +29,8 @@ default_profile = profile-b === Profiles after second switch >>> [CLI] auth profiles --skip-validate +Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-b.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-b.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-b.cloud.databricks.com: no such host. Falling back to user config. +Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-a.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-a.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-a.cloud.databricks.com: no such host. Falling back to user config. 
Name Host Valid profile-a https://profile-a.cloud.databricks.com NO profile-b (Default) https://profile-b.cloud.databricks.com NO diff --git a/acceptance/cmd/workspace/apps/out.requests.txt b/acceptance/cmd/workspace/apps/out.requests.txt index 10654d310e..9962050b50 100644 --- a/acceptance/cmd/workspace/apps/out.requests.txt +++ b/acceptance/cmd/workspace/apps/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/apps", @@ -21,6 +25,10 @@ "method": "GET", "path": "/api/2.0/apps/test-name" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "PATCH", "path": "/api/2.0/apps/test-name", diff --git a/acceptance/help/output.txt b/acceptance/help/output.txt index 41ac18aaf1..dd59847c64 100644 --- a/acceptance/help/output.txt +++ b/acceptance/help/output.txt @@ -138,6 +138,9 @@ Clean Rooms Quality Monitor quality-monitor-v2 Deprecated: Please use the Data Quality Monitoring API instead (REST: /api/data-quality/v1/monitors). +Data Classification + data-classification Manage data classification for Unity Catalog catalogs. + Data Quality Monitoring data-quality Manage the data quality of Unity Catalog objects (currently support schema and table). @@ -155,6 +158,9 @@ Tags Postgres postgres Use the Postgres API to create and manage Lakebase Autoscaling Postgres infrastructure, including projects, branches, compute endpoints, and roles. +Environments + environments APIs to manage environment resources. + Developer Tools bundle Declarative Automation Bundles let you express data/AI/analytics projects as code. 
sync Synchronize a local directory to a workspace directory diff --git a/acceptance/selftest/server/out.requests.txt b/acceptance/selftest/server/out.requests.txt index fc2911fb9f..02cfcd24d8 100644 --- a/acceptance/selftest/server/out.requests.txt +++ b/acceptance/selftest/server/out.requests.txt @@ -9,6 +9,10 @@ "query": "param" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", diff --git a/acceptance/telemetry/failure/out.requests.txt b/acceptance/telemetry/failure/out.requests.txt index dd781ebf8e..19177f4de1 100644 --- a/acceptance/telemetry/failure/out.requests.txt +++ b/acceptance/telemetry/failure/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/telemetry/failure/output.txt b/acceptance/telemetry/failure/output.txt index be7fa7ff3f..af0c34a13e 100644 --- a/acceptance/telemetry/failure/output.txt +++ b/acceptance/telemetry/failure/output.txt @@ -1,6 +1,15 @@ >>> [CLI] selftest send-telemetry --debug HH:MM:SS Info: start pid=PID version=[DEV_VERSION] args="[CLI], selftest, send-telemetry, --debug" +HH:MM:SS Debug: GET /.well-known/databricks-config +< HTTP/1.1 200 OK +< { +< "oidc_endpoint": "[DATABRICKS_URL]/oidc", +< "workspace_id": "[NUMID]" +< } pid=PID sdk=true +HH:MM:SS Debug: Resolved workspace_id from host metadata: "[NUMID]" pid=PID sdk=true +HH:MM:SS Debug: Resolved cloud from hostname: "AWS" pid=PID sdk=true +HH:MM:SS Debug: Resolved discovery_url from host metadata: "[DATABRICKS_URL]/oidc/.well-known/oauth-authorization-server" pid=PID sdk=true HH:MM:SS Info: completed execution pid=PID exit_code=0 HH:MM:SS Debug: POST /telemetry-ext > { diff --git a/acceptance/telemetry/partial-success/out.requests.txt b/acceptance/telemetry/partial-success/out.requests.txt index dd781ebf8e..19177f4de1 100644 --- a/acceptance/telemetry/partial-success/out.requests.txt +++ 
b/acceptance/telemetry/partial-success/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/telemetry/partial-success/output.txt b/acceptance/telemetry/partial-success/output.txt index a6f5bb4781..113dc11b66 100644 --- a/acceptance/telemetry/partial-success/output.txt +++ b/acceptance/telemetry/partial-success/output.txt @@ -1,6 +1,15 @@ >>> [CLI] selftest send-telemetry --debug HH:MM:SS Info: start pid=PID version=[DEV_VERSION] args="[CLI], selftest, send-telemetry, --debug" +HH:MM:SS Debug: GET /.well-known/databricks-config +< HTTP/1.1 200 OK +< { +< "oidc_endpoint": "[DATABRICKS_URL]/oidc", +< "workspace_id": "[NUMID]" +< } pid=PID sdk=true +HH:MM:SS Debug: Resolved workspace_id from host metadata: "[NUMID]" pid=PID sdk=true +HH:MM:SS Debug: Resolved cloud from hostname: "AWS" pid=PID sdk=true +HH:MM:SS Debug: Resolved discovery_url from host metadata: "[DATABRICKS_URL]/oidc/.well-known/oauth-authorization-server" pid=PID sdk=true HH:MM:SS Info: completed execution pid=PID exit_code=0 HH:MM:SS Debug: POST /telemetry-ext > { diff --git a/acceptance/telemetry/skipped/out.requests.txt b/acceptance/telemetry/skipped/out.requests.txt new file mode 100644 index 0000000000..40bd283a0c --- /dev/null +++ b/acceptance/telemetry/skipped/out.requests.txt @@ -0,0 +1,4 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} diff --git a/acceptance/telemetry/skipped/output.txt b/acceptance/telemetry/skipped/output.txt index 640e03d543..e85ce380a4 100644 --- a/acceptance/telemetry/skipped/output.txt +++ b/acceptance/telemetry/skipped/output.txt @@ -1,5 +1,14 @@ >>> [CLI] selftest send-telemetry --debug HH:MM:SS Info: start pid=PID version=[DEV_VERSION] args="[CLI], selftest, send-telemetry, --debug" +HH:MM:SS Debug: GET /.well-known/databricks-config +< HTTP/1.1 200 OK +< { +< "oidc_endpoint": "[DATABRICKS_URL]/oidc", +< "workspace_id": "[NUMID]" +< } 
pid=PID sdk=true +HH:MM:SS Debug: Resolved workspace_id from host metadata: "[NUMID]" pid=PID sdk=true +HH:MM:SS Debug: Resolved cloud from hostname: "AWS" pid=PID sdk=true +HH:MM:SS Debug: Resolved discovery_url from host metadata: "[DATABRICKS_URL]/oidc/.well-known/oauth-authorization-server" pid=PID sdk=true HH:MM:SS Info: completed execution pid=PID exit_code=0 HH:MM:SS Debug: telemetry upload is disabled. Not uploading any logs. pid=PID diff --git a/acceptance/telemetry/success/out.requests.txt b/acceptance/telemetry/success/out.requests.txt index 7df0d57bdf..22e7a92d72 100644 --- a/acceptance/telemetry/success/out.requests.txt +++ b/acceptance/telemetry/success/out.requests.txt @@ -1,3 +1,12 @@ +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/telemetry/success/output.txt b/acceptance/telemetry/success/output.txt index c029c43f48..f3b410f765 100644 --- a/acceptance/telemetry/success/output.txt +++ b/acceptance/telemetry/success/output.txt @@ -1,6 +1,15 @@ >>> [CLI] selftest send-telemetry --debug HH:MM:SS Info: start pid=PID version=[DEV_VERSION] args="[CLI], selftest, send-telemetry, --debug" +HH:MM:SS Debug: GET /.well-known/databricks-config +< HTTP/1.1 200 OK +< { +< "oidc_endpoint": "[DATABRICKS_URL]/oidc", +< "workspace_id": "[NUMID]" +< } pid=PID sdk=true +HH:MM:SS Debug: Resolved workspace_id from host metadata: "[NUMID]" pid=PID sdk=true +HH:MM:SS Debug: Resolved cloud from hostname: "AWS" pid=PID sdk=true +HH:MM:SS Debug: Resolved discovery_url from host metadata: "[DATABRICKS_URL]/oidc/.well-known/oauth-authorization-server" pid=PID sdk=true HH:MM:SS Info: completed execution pid=PID exit_code=0 HH:MM:SS Debug: POST /telemetry-ext > { diff --git a/acceptance/telemetry/timeout/out.requests.txt b/acceptance/telemetry/timeout/out.requests.txt index 
ca8f41c1db..9fe44bf9ba 100644 --- a/acceptance/telemetry/timeout/out.requests.txt +++ b/acceptance/telemetry/timeout/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/telemetry/timeout/output.txt b/acceptance/telemetry/timeout/output.txt index 19e2b8551f..a124cc72b6 100644 --- a/acceptance/telemetry/timeout/output.txt +++ b/acceptance/telemetry/timeout/output.txt @@ -1,6 +1,15 @@ >>> [CLI] selftest send-telemetry --debug HH:MM:SS Info: start pid=PID version=[DEV_VERSION] args="[CLI], selftest, send-telemetry, --debug" +HH:MM:SS Debug: GET /.well-known/databricks-config +< HTTP/1.1 200 OK +< { +< "oidc_endpoint": "[DATABRICKS_URL]/oidc", +< "workspace_id": "[NUMID]" +< } pid=PID sdk=true +HH:MM:SS Debug: Resolved workspace_id from host metadata: "[NUMID]" pid=PID sdk=true +HH:MM:SS Debug: Resolved cloud from hostname: "AWS" pid=PID sdk=true +HH:MM:SS Debug: Resolved discovery_url from host metadata: "[DATABRICKS_URL]/oidc/.well-known/oauth-authorization-server" pid=PID sdk=true HH:MM:SS Info: completed execution pid=PID exit_code=0 HH:MM:SS Debug: POST /telemetry-ext > { diff --git a/acceptance/workspace/jobs/create-error/out.requests.txt b/acceptance/workspace/jobs/create-error/out.requests.txt index 8078eda55e..9d7f5dde1e 100644 --- a/acceptance/workspace/jobs/create-error/out.requests.txt +++ b/acceptance/workspace/jobs/create-error/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.2/jobs/create", diff --git a/acceptance/workspace/jobs/create/out.requests.txt b/acceptance/workspace/jobs/create/out.requests.txt index 9af2cb74dc..8cde6c5f5f 100644 --- a/acceptance/workspace/jobs/create/out.requests.txt +++ b/acceptance/workspace/jobs/create/out.requests.txt @@ -1,3 +1,12 @@ +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] 
go/[GO_VERSION] os/[OS]" + ] + }, + "method": "GET", + "path": "/.well-known/databricks-config" +} { "headers": { "Authorization": [ diff --git a/acceptance/workspace/lakeview/publish/out.requests.txt b/acceptance/workspace/lakeview/publish/out.requests.txt index 37bb7ccb96..4adba9b64a 100644 --- a/acceptance/workspace/lakeview/publish/out.requests.txt +++ b/acceptance/workspace/lakeview/publish/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/workspace/mkdirs", @@ -5,6 +9,10 @@ "path": "/Users/[USERNAME]" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/lakeview/dashboards", @@ -14,6 +22,10 @@ "warehouse_id": "test-warehouse" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/lakeview/dashboards/[DASHBOARD_ID]/published", diff --git a/acceptance/workspace/repos/create_with_provider/out.requests.txt b/acceptance/workspace/repos/create_with_provider/out.requests.txt index 91fbeac542..73219c0a27 100644 --- a/acceptance/workspace/repos/create_with_provider/out.requests.txt +++ b/acceptance/workspace/repos/create_with_provider/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/repos", @@ -7,10 +11,18 @@ "url": "https://github.com/databricks/databricks-empty-ide-project.git" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/repos/[NUMID]" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", @@ -22,6 +34,10 @@ "method": "GET", "path": "/api/2.0/repos/[NUMID]" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "DELETE", "path": "/api/2.0/repos/[NUMID]" diff --git 
a/acceptance/workspace/repos/create_without_provider/out.requests.txt b/acceptance/workspace/repos/create_without_provider/out.requests.txt index 1394adce2c..eb4819231f 100644 --- a/acceptance/workspace/repos/create_without_provider/out.requests.txt +++ b/acceptance/workspace/repos/create_without_provider/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/repos", diff --git a/acceptance/workspace/repos/delete_by_path/out.requests.txt b/acceptance/workspace/repos/delete_by_path/out.requests.txt index 286b8bb26e..f6857935ae 100644 --- a/acceptance/workspace/repos/delete_by_path/out.requests.txt +++ b/acceptance/workspace/repos/delete_by_path/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/repos", @@ -7,6 +11,10 @@ "url": "https://github.com/databricks/databricks-empty-ide-project.git" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", @@ -18,6 +26,10 @@ "method": "GET", "path": "/api/2.0/repos/[NUMID]" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", @@ -29,6 +41,10 @@ "method": "DELETE", "path": "/api/2.0/repos/[NUMID]" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", diff --git a/acceptance/workspace/repos/get_errors/out.requests.txt b/acceptance/workspace/repos/get_errors/out.requests.txt index f3773029f3..24de0f3dd0 100644 --- a/acceptance/workspace/repos/get_errors/out.requests.txt +++ b/acceptance/workspace/repos/get_errors/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", @@ -5,6 +9,10 @@ "path": "/Repos/me@databricks.com/doesnotexist" } 
} +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/workspace/mkdirs", @@ -12,6 +20,10 @@ "path": "/not-a-repo" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", diff --git a/acceptance/workspace/repos/update/out.requests.txt b/acceptance/workspace/repos/update/out.requests.txt index 1022de081c..ca982e372d 100644 --- a/acceptance/workspace/repos/update/out.requests.txt +++ b/acceptance/workspace/repos/update/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/repos", @@ -7,6 +11,10 @@ "url": "https://github.com/databricks/databricks-empty-ide-project.git" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "PATCH", "path": "/api/2.0/repos/[NUMID]", @@ -14,10 +22,18 @@ "branch": "update-by-id" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/repos/[NUMID]" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", @@ -32,6 +48,10 @@ "branch": "update-by-path" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/repos/[NUMID]" diff --git a/libs/testserver/server.go b/libs/testserver/server.go index 7307e7a04a..a2d28a3738 100644 --- a/libs/testserver/server.go +++ b/libs/testserver/server.go @@ -259,6 +259,18 @@ Response.Body = '' router.NotFoundHandler = notFoundFunc router.MethodNotAllowedHandler = notFoundFunc + // Register a default handler for the SDK's host metadata discovery endpoint. + // The SDK resolves this during config initialization (as of v0.126.0) to + // determine workspace/account IDs, cloud, and OIDC endpoints. 
Without this + // handler, any test that creates an SDK client against this server would fail + // with "No handler for URL: /.well-known/databricks-config". + s.Handle("GET", "/.well-known/databricks-config", func(_ Request) any { + return map[string]string{ + "oidc_endpoint": server.URL + "/oidc", + "workspace_id": "12345678", + } + }) + return s } From a66458b7c2be8f95570565cbccc3384691b6bb1e Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 15:08:47 +0100 Subject: [PATCH 08/17] Trust SDK auth resolution instead of overriding with HostType() When auth succeeds, return early instead of checking HostType() to decide whether to prompt or error. HostType() is a URL-pattern heuristic that returns WorkspaceHost for unified host URLs, causing false negatives for account clients on unified hosts. The SDK's auth resolution (which includes .well-known/databricks-config metadata lookups) is the source of truth. Co-authored-by: Isaac --- .../telemetry/deploy-error/out.requests.txt | 2 +- cmd/root/auth.go | 16 ++++++++++++++ cmd/root/auth_test.go | 21 +++++++++++++------ 3 files changed, 32 insertions(+), 7 deletions(-) diff --git a/acceptance/bundle/telemetry/deploy-error/out.requests.txt b/acceptance/bundle/telemetry/deploy-error/out.requests.txt index d28889843b..ac8a202b5b 100644 --- a/acceptance/bundle/telemetry/deploy-error/out.requests.txt +++ b/acceptance/bundle/telemetry/deploy-error/out.requests.txt @@ -28,7 +28,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":23,\"exit_code\":1},\"bundle_deploy_event\":{\"resource_count\":0,\"resource_job_count\":0,\"resource_pipeline_count\":0,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":4,\"exit_code\":1},\"bundle_deploy_event\":{\"resource_count\":0,\"resource_job_count\":0,\"resource_pipeline_count\":0,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0}}}}" ] } } diff --git a/cmd/root/auth.go b/cmd/root/auth.go index d21fda26c6..fa07d81bfd 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -56,6 +56,14 @@ func accountClientOrPrompt(ctx context.Context, cfg *config.Config, allowPrompt err = a.Config.Authenticate(emptyHttpRequest(ctx)) } + // If auth succeeded and we have an account ID, trust the SDK's resolution. + // The SDK resolves host metadata (including .well-known/databricks-config) + // during config initialization, so a successful auth means the config is valid + // regardless of what HostType() returns from URL pattern matching. 
+ if err == nil && cfg.AccountID != "" { + return a, nil + } + // Determine if we should prompt for a profile based on host type. // The SDK no longer returns ErrNotAccountClient from NewAccountClient // (as of v0.125.0, host-type validation was removed in favor of host @@ -173,6 +181,14 @@ func workspaceClientOrPrompt(ctx context.Context, cfg *config.Config, allowPromp err = w.Config.Authenticate(emptyHttpRequest(ctx)) } + // If auth succeeded, trust the SDK's resolution. The SDK resolves host + // metadata (including .well-known/databricks-config) during config + // initialization, so a successful auth means the config is valid + // regardless of what HostType() returns from URL pattern matching. + if err == nil { + return w, nil + } + // Determine if we should prompt for a profile based on host type. // The SDK no longer returns ErrNotWorkspaceClient from NewWorkspaceClient // (as of v0.125.0, host-type validation was removed in favor of host diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go index edffdab3cb..e198799ebc 100644 --- a/cmd/root/auth_test.go +++ b/cmd/root/auth_test.go @@ -109,8 +109,11 @@ func TestAccountClientOrPrompt(t *testing.T) { expectPrompts(t, accountPromptFn, &config.Config{}) }) - t.Run("Prompt if a workspace host is specified", func(t *testing.T) { - expectPrompts(t, accountPromptFn, &config.Config{ + t.Run("Returns if a workspace host is specified with valid auth and account ID", func(t *testing.T) { + // If auth succeeds and an account ID is present, trust the SDK's resolution. + // This supports unified hosts where HostType() returns WorkspaceHost but + // account APIs are available. 
+ expectReturns(t, accountPromptFn, &config.Config{ Host: "https://adb-1234567.89.azuredatabricks.net/", AccountID: "1234", Token: "foobar", @@ -179,8 +182,11 @@ func TestWorkspaceClientOrPrompt(t *testing.T) { expectPrompts(t, workspacePromptFn, &config.Config{}) }) - t.Run("Prompt if an account host is specified", func(t *testing.T) { - expectPrompts(t, workspacePromptFn, &config.Config{ + t.Run("Returns if an account host is specified with valid auth", func(t *testing.T) { + // If auth succeeds, trust the SDK's resolution. This supports unified + // hosts where HostType() returns AccountHost but workspace APIs are + // available. + expectReturns(t, workspacePromptFn, &config.Config{ Host: "https://accounts.azuredatabricks.net/", AccountID: "1234", Token: "foobar", @@ -442,10 +448,13 @@ func TestAccountClientOrPromptReturnsErrorForWrongHostType(t *testing.T) { assert.ErrorIs(t, err, databricks.ErrNotAccountClient) } -func TestWorkspaceClientOrPromptReturnsErrorForWrongHostType(t *testing.T) { +func TestWorkspaceClientOrPromptReturnsSuccessWhenAuthSucceeds(t *testing.T) { testutil.CleanupEnvironment(t) t.Setenv("PATH", "") + // If auth succeeds, trust the SDK's resolution regardless of HostType(). + // This supports unified hosts where HostType() returns AccountHost but + // workspace APIs are available. 
cfg := &config.Config{ Host: "https://accounts.azuredatabricks.net/", AccountID: "1234", @@ -455,7 +464,7 @@ func TestWorkspaceClientOrPromptReturnsErrorForWrongHostType(t *testing.T) { w, err := workspaceClientOrPrompt(t.Context(), cfg, false) assert.NotNil(t, w) - assert.ErrorIs(t, err, databricks.ErrNotWorkspaceClient) + assert.NoError(t, err) } func TestAccountClientOrPromptReturnsErrorForMissingAccountID(t *testing.T) { From 217f7a0c85019d45e17d9ec3a7f37b47a0a76185 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 15:22:14 +0100 Subject: [PATCH 09/17] Regenerate remaining acceptance test golden files The user_agent test output now includes the .well-known/databricks-config request. The telemetry deploy test has non-deterministic request ordering that differs between terraform and direct engine variants. Co-authored-by: Isaac --- .../bundle/telemetry/deploy/out.requests.txt | 118 +++++++++--------- acceptance/bundle/user_agent/output.txt | 7 ++ 2 files changed, 66 insertions(+), 59 deletions(-) diff --git a/acceptance/bundle/telemetry/deploy/out.requests.txt b/acceptance/bundle/telemetry/deploy/out.requests.txt index c993ce6810..1f8a3a06e0 100644 --- a/acceptance/bundle/telemetry/deploy/out.requests.txt +++ b/acceptance/bundle/telemetry/deploy/out.requests.txt @@ -160,32 +160,6 @@ "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" } } -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt", - "q": { - "overwrite": "true" - }, - "raw_body": "\n\u003e\u003e\u003e [CLI] bundle deploy\nUploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...\n" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] 
databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml", - "q": { - "overwrite": "true" - }, - "raw_body": "bundle:\n name: test-bundle\n uuid: [UUID]\n\nresources:\n jobs:\n job_one:\n name: job one\n job_two:\n name: job two\n job_three:\n name: job three\n\n pipelines:\n pipeline_one:\n name: pipeline one\n pipeline_two:\n name: pipeline two\n" -} { "headers": { "User-Agent": [ @@ -350,19 +324,19 @@ "Distinct": false }, { - "Old": "https://127\\.0\\.0\\.1:51297", + "Old": "https://127\\.0\\.0\\.1:52538", "New": "[DATABRICKS_URL]", "Order": 0, "Distinct": false }, { - "Old": "http://127\\.0\\.0\\.1:51297", + "Old": "http://127\\.0\\.0\\.1:52538", "New": "[DATABRICKS_URL]", "Order": 0, "Distinct": false }, { - "Old": "127\\.0\\.0\\.1:51297", + "Old": "127\\.0\\.0\\.1:52538", "New": "[DATABRICKS_HOST]", "Order": 0, "Distinct": false @@ -484,10 +458,24 @@ ] }, "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/test.toml", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt", "q": { "overwrite": "true" - } + }, + "raw_body": "\n\u003e\u003e\u003e [CLI] bundle deploy\nUploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...\n" +} +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml", + "q": { + "overwrite": "true" + }, + "raw_body": "bundle:\n name: test-bundle\n uuid: 
[UUID]\n\nresources:\n jobs:\n job_one:\n name: job one\n job_two:\n name: job two\n job_three:\n name: job three\n\n pipelines:\n pipeline_one:\n name: pipeline one\n pipeline_two:\n name: pipeline two\n" } { "headers": { @@ -502,6 +490,18 @@ }, "raw_body": "{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/.well-known/databricks-config\"\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/preview/scim/v2/Me\"\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n 
\"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\",\n \"q\": {\n \"overwrite\": \"false\"\n },\n \"body\": {\n \"ID\": \"[UUID]\",\n \"AcquisitionTime\": \"[TIMESTAMP]\",\n \"IsForced\": false,\n \"User\": \"[USERNAME]\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/export\",\n \"q\": {\n \"direct_download\": \"true\",\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace/delete\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal\",\n \"recursive\": true\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n 
\"path\": \"/api/2.0/workspace/mkdirs\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace/mkdirs\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n" } +{ + "headers": { + "User-Agent": [ + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" + ] + }, + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/test.toml", + "q": { + "overwrite": "true" + } +} { "headers": { "User-Agent": [ @@ -519,6 +519,10 @@ "cli_version": "[DEV_VERSION]", "timestamp": "[TIMESTAMP]", "files": [ + { + "local_path": "script", + "is_notebook": false + }, { "local_path": "test.toml", "is_notebook": false @@ -538,10 +542,6 @@ { "local_path": "repls.json", "is_notebook": false - }, - { - "local_path": 
"script", - "is_notebook": false } ], "id": "[UUID]" @@ -565,15 +565,6 @@ "name": "pipeline two" } } -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/pipelines/[UUID]" -} { "headers": { "User-Agent": [ @@ -619,6 +610,15 @@ "method": "GET", "path": "/api/2.0/pipelines/[UUID]" } +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.0/pipelines/[UUID]" +} { "headers": { "User-Agent": [ @@ -635,7 +635,7 @@ "edit_mode": "UI_LOCKED", "format": "MULTI_TASK", "max_concurrent_runs": 1, - "name": "job three", + "name": "job one", "queue": { "enabled": true } @@ -675,6 +675,18 @@ } } } +{ + "headers": { + "User-Agent": [ + "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" + ] + }, + "method": "GET", + "path": "/api/2.2/jobs/get", + "q": { + "job_id": "[NUMID]" + } +} { "headers": { "User-Agent": [ @@ -691,7 +703,7 @@ "edit_mode": "UI_LOCKED", "format": "MULTI_TASK", "max_concurrent_runs": 1, - "name": "job one", + "name": "job three", "queue": { "enabled": true } @@ -709,18 +721,6 @@ "job_id": "[NUMID]" } } -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.2/jobs/get", - "q": { - "job_id": "[NUMID]" - } -} { "headers": { "User-Agent": [ @@ -1263,7 +1263,7 @@ "uploadTime": [UNIX_TIME_MILLIS][2], "items": [], "protoLogs": [ - 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":1588,\"exit_code\":0},\"bundle_deploy_event\":{\"bundle_uuid\":\"[UUID]\",\"deployment_id\":\"[UUID]\",\"resource_count\":5,\"resource_job_count\":3,\"resource_pipeline_count\":2,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0,\"resource_job_ids\":[\"[NUMID]\",\"[NUMID]\",\"[NUMID]\"],\"resource_pipeline_ids\":[\"[UUID]\",\"[UUID]\"],\"experimental\":{\"configuration_file_count\":1,\"variable_count\":0,\"complex_variable_count\":0,\"lookup_variable_count\":0,\"target_count\":1,\"bool_values\":[{\"key\":\"local.cache.attempt\",\"value\":true},{\"key\":\"local.cache.miss\",\"value\":true},{\"key\":\"experimental.use_legacy_run_as\",\"value\":false},{\"key\":\"run_as_set\",\"value\":false},{\"key\":\"presets_name_prefix_is_set\",\"value\":false},{\"key\":\"python_wheel_wrapper_is_set\",\"value\":false},{\"key\":\"skip_artifact_cleanup\",\"value\":false},{\"key\":\"has_serverless_compute\",\"value\":false},{\"key\":\"has_classic_job_compute\",\"value\":false},{\"key\":\"has_classic_interactive_compute\",\"value\":false}],\"bundle_mode\":\"TYPE_UNSPECIFIED\",\"workspace_artifact_path_type\":\"WORKSPACE_FILE_SYSTEM\",\"bundle_mutator_execution_time_ms\":[{\"key\":\"terraform.(plan)\",\"value\":825},{\"key\":\"terraform.(apply)\",\"value\":450},{\"key\":\"phases.Initialize\",\"value\":22},{\"key\":\"files.(upload)\",\"value\":12},{\"key\":\"phases.Build\",\"value\":8},{\"key\":\"artifacts.(build)\",\"value\":7},{\"key\":\"resourcemutator.(processStaticResources)\",\"value\":7
},{\"key\":\"deploy.(statePull)\",\"value\":5},{\"key\":\"mutator.(populateCurrentUser)\",\"value\":5},{\"key\":\"lock.(acquire)\",\"value\":4},{\"key\":\"mutator.(initializeCache)\",\"value\":3},{\"key\":\"terraform.(write)\",\"value\":2},{\"key\":\"artifacts.(cleanUp)\",\"value\":2},{\"key\":\"deploy.(statePush)\",\"value\":1},{\"key\":\"libraries.(upload)\",\"value\":1},{\"key\":\"metadata.(upload)\",\"value\":1},{\"key\":\"validate.FastValidate\",\"value\":0}],\"local_cache_measurements_ms\":[{\"key\":\"local.cache.compute_duration\",\"value\":0}]}}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":2367,\"exit_code\":0},\"bundle_deploy_event\":{\"bundle_uuid\":\"[UUID]\",\"deployment_id\":\"[UUID]\",\"resource_count\":5,\"resource_job_count\":3,\"resource_pipeline_count\":2,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0,\"resource_job_ids\":[\"[NUMID]\",\"[NUMID]\",\"[NUMID]\"],\"resource_pipeline_ids\":[\"[UUID]\",\"[UUID]\"],\"experimental\":{\"configuration_file_count\":1,\"variable_count\":0,\"complex_variable_count\":0,\"lookup_variable_count\":0,\"target_count\":1,\"bool_values\":[{\"key\":\"local.cache.attempt\",\"value\":true},{\"key\":\"local.cache.miss\",\"value\":true},{\"key\":\"experimental.use_legacy_run_as\",\"value\":false},{\"key\":\"run_as_set\",\"value\":false},{\"key\":\"presets_name_prefix_is_set\",\"value\":false},{\"key\":\"python_wheel_wrapper_is_set\",\"value\":false},{\"key\":\"skip_artifact_cleanup\",\"value\":false},{\"key\":\"has_serverless_compute\",\"value\":false},{\"key\":\"
has_classic_job_compute\",\"value\":false},{\"key\":\"has_classic_interactive_compute\",\"value\":false}],\"bundle_mode\":\"TYPE_UNSPECIFIED\",\"workspace_artifact_path_type\":\"WORKSPACE_FILE_SYSTEM\",\"bundle_mutator_execution_time_ms\":[{\"key\":\"terraform.(plan)\",\"value\":1310},{\"key\":\"terraform.(apply)\",\"value\":732},{\"key\":\"phases.Initialize\",\"value\":15},{\"key\":\"files.(upload)\",\"value\":7},{\"key\":\"resourcemutator.(processStaticResources)\",\"value\":5},{\"key\":\"mutator.(initializeCache)\",\"value\":2},{\"key\":\"mutator.(populateCurrentUser)\",\"value\":2},{\"key\":\"phases.Build\",\"value\":2},{\"key\":\"lock.(acquire)\",\"value\":2},{\"key\":\"artifacts.(build)\",\"value\":1},{\"key\":\"terraform.(interpolateMutator)\",\"value\":1},{\"key\":\"validate.FastValidate\",\"value\":0}],\"local_cache_measurements_ms\":[{\"key\":\"local.cache.compute_duration\",\"value\":0}]}}}}}" ] } } diff --git a/acceptance/bundle/user_agent/output.txt b/acceptance/bundle/user_agent/output.txt index cbe9899beb..3589f99e06 100644 --- a/acceptance/bundle/user_agent/output.txt +++ b/acceptance/bundle/user_agent/output.txt @@ -36,6 +36,7 @@ OK deploy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USER OK deploy.terraform /api/2.0/workspace/delete engine/terraform OK deploy.terraform /api/2.0/workspace/delete engine/terraform OK deploy.terraform /api/2.0/workspace/mkdirs engine/terraform +MISS deploy.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS deploy.terraform /api/2.1/unity-catalog/schemas/mycatalog.myschema 'databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' MISS deploy.terraform /api/2.1/unity-catalog/schemas 'databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' MISS 
destroy.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_destroy cmd-exec-id/[UUID] interactive/none auth/pat' @@ -61,6 +62,7 @@ OK destroy.terraform /api/2.0/workspace/get-status engine/terraform OK destroy.terraform /api/2.0/workspace/get-status engine/terraform OK destroy.terraform /api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock engine/terraform OK destroy.terraform /api/2.0/workspace/delete engine/terraform +MISS destroy.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS destroy.terraform /api/2.1/unity-catalog/schemas/mycatalog.myschema 'databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' MISS destroy.terraform /api/2.1/unity-catalog/schemas/mycatalog.myschema 'databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' MISS plan.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' @@ -72,6 +74,7 @@ MISS plan.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sd MISS plan.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' OK plan.terraform /api/2.0/workspace/get-status engine/terraform +MISS plan.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS plan2.direct 
/api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan2.direct /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' MISS plan2.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' @@ -86,6 +89,7 @@ MISS plan2.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks MISS plan2.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_plan cmd-exec-id/[UUID] interactive/none auth/pat' OK plan2.terraform /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json engine/terraform OK plan2.terraform /api/2.0/workspace/get-status engine/terraform +MISS plan2.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS plan2.terraform /api/2.1/unity-catalog/schemas/mycatalog.myschema 'databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/schema auth/pat' MISS run.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' MISS run.direct /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' @@ -96,6 +100,7 @@ MISS run.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk MISS 
run.terraform /api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' MISS run.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' MISS run.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_run cmd-exec-id/[UUID] interactive/none auth/pat' +MISS run.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' MISS summary.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' MISS summary.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' MISS summary.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' @@ -105,6 +110,7 @@ MISS summary.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks MISS summary.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' MISS summary.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_summary cmd-exec-id/[UUID] interactive/none auth/pat' OK summary.terraform /api/2.0/preview/scim/v2/Me engine/terraform +MISS summary.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] 
os/[OS]' MISS validate.direct /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.direct /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' @@ -114,3 +120,4 @@ MISS validate.terraform /api/2.0/preview/scim/v2/Me 'cli/[DEV_VERSION] databrick MISS validate.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.terraform /api/2.0/workspace/get-status 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' MISS validate.terraform /api/2.0/workspace/mkdirs 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_validate cmd-exec-id/[UUID] interactive/none auth/pat' +MISS validate.terraform /.well-known/databricks-config 'cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]' From 2c7d6cfa680cd0e11a98d0de6c96fa7be62160e1 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 15:31:42 +0100 Subject: [PATCH 10/17] Address PR review feedback - Revert tagging.yml and tagging.py to main (no version downgrades or mode changes from codegen) - Remove user_agent_agent_test.go per reviewer feedback (we don't test other SDK-injected UA bits, no need to test this one) - Fix govet nilness lint in workspaceClientOrPrompt: simplify needsPrompt logic to avoid tautological/impossible nil checks after the early return on successful auth Co-authored-by: Isaac --- 
.github/workflows/tagging.yml | 4 +- cmd/root/auth.go | 29 +++----- cmd/root/auth_test.go | 8 ++- cmd/root/user_agent_agent_test.go | 110 ------------------------------ internal/genkit/tagging.py | 0 5 files changed, 18 insertions(+), 133 deletions(-) mode change 100755 => 100644 .github/workflows/tagging.yml delete mode 100644 cmd/root/user_agent_agent_test.go mode change 100755 => 100644 internal/genkit/tagging.py diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml old mode 100755 new mode 100644 index 9bca7399f3..0752107ac9 --- a/.github/workflows/tagging.yml +++ b/.github/workflows/tagging.yml @@ -34,13 +34,13 @@ jobs: steps: - name: Generate GitHub App Token id: generate-token - uses: actions/create-github-app-token@v2 + uses: actions/create-github-app-token@v3 with: app-id: ${{ secrets.DECO_SDK_TAGGING_APP_ID }} private-key: ${{ secrets.DECO_SDK_TAGGING_PRIVATE_KEY }} - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 token: ${{ steps.generate-token.outputs.token }} diff --git a/cmd/root/auth.go b/cmd/root/auth.go index fa07d81bfd..ff95967275 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -189,32 +189,21 @@ func workspaceClientOrPrompt(ctx context.Context, cfg *config.Config, allowPromp return w, nil } - // Determine if we should prompt for a profile based on host type. - // The SDK no longer returns ErrNotWorkspaceClient from NewWorkspaceClient - // (as of v0.125.0, host-type validation was removed in favor of host - // metadata resolution). Use HostType() to detect the wrong host type. - var needsPrompt bool - switch cfg.HostType() { - case config.WorkspaceHost, config.UnifiedHost: - // Both workspace and unified hosts can serve workspace APIs. - needsPrompt = false - default: - // AccountHost or unknown: wrong type for workspace client. 
- needsPrompt = true - } - if !needsPrompt && err != nil && errors.Is(err, config.ErrCannotConfigureDefault) { - needsPrompt = true - } + // Determine if we should prompt for a profile. The SDK no longer returns + // ErrNotWorkspaceClient from NewWorkspaceClient (as of v0.125.0, host-type + // validation was removed in favor of host metadata resolution). Use + // HostType() to detect wrong host type, and check for ErrCannotConfigureDefault. + needsPrompt := cfg.HostType() == config.AccountHost || + errors.Is(err, config.ErrCannotConfigureDefault) if !needsPrompt { return w, err } if !allowPrompt || !cmdio.IsPromptSupported(ctx) { - if err == nil { - err = databricks.ErrNotWorkspaceClient - } - return w, err + // Synthesize ErrNotWorkspaceClient so callers (like MustAnyClient) + // can detect the wrong config type and fall through to account client. + return w, databricks.ErrNotWorkspaceClient } // Try picking a profile dynamically if the current configuration is not valid. diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go index e198799ebc..b0faadd94b 100644 --- a/cmd/root/auth_test.go +++ b/cmd/root/auth_test.go @@ -17,8 +17,14 @@ import ( "github.com/stretchr/testify/require" ) +type roundTripperFunc func(*http.Request) (*http.Response, error) + +func (f roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { + return f(r) +} + // noNetworkTransport prevents real HTTP calls in auth tests. -// Returns 404 for all requests, preventing real HTTP calls during auth tests. +// Returns 404 for all requests so host metadata resolution falls back gracefully. 
var noNetworkTransport = roundTripperFunc(func(r *http.Request) (*http.Response, error) { return &http.Response{StatusCode: http.StatusNotFound, Body: http.NoBody}, nil }) diff --git a/cmd/root/user_agent_agent_test.go b/cmd/root/user_agent_agent_test.go deleted file mode 100644 index d2dec2a2a2..0000000000 --- a/cmd/root/user_agent_agent_test.go +++ /dev/null @@ -1,110 +0,0 @@ -package root - -import ( - "net/http" - "os" - "strings" - "testing" - - "github.com/databricks/databricks-sdk-go/config" - "github.com/databricks/databricks-sdk-go/httpclient" - "github.com/databricks/databricks-sdk-go/useragent" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// All known agent env vars. Must be unset in tests to avoid interference -// from the host environment (e.g., running tests inside Claude Code). -var agentEnvVars = []string{ - "ANTIGRAVITY_AGENT", - "CLAUDECODE", - "CLINE_ACTIVE", - "CODEX_CI", - "COPILOT_CLI", - "CURSOR_AGENT", - "GEMINI_CLI", - "OPENCLAW_SHELL", - "OPENCODE", -} - -// unsetAgentEnv removes all known agent env vars from the environment. -// The SDK uses os.LookupEnv, so setting to empty is not enough; the vars -// must be fully unset. -func unsetAgentEnv(t *testing.T) { - t.Helper() - for _, v := range agentEnvVars { - original, exists := os.LookupEnv(v) - os.Unsetenv(v) - if exists { - t.Cleanup(func() { os.Setenv(v, original) }) - } - } -} - -// captureUserAgent makes an HTTP request through the SDK and returns the -// captured User-Agent header string. 
-func captureUserAgent(t *testing.T) string { - t.Helper() - - var capturedUA string - cfg := &config.Config{ - Host: "https://test.databricks.com", - Token: "test-token", - HTTPTransport: roundTripperFunc(func(r *http.Request) (*http.Response, error) { - capturedUA = r.Header.Get("User-Agent") - return &http.Response{StatusCode: 200, Body: http.NoBody}, nil - }), - } - - clientCfg, err := config.HTTPClientConfigFromConfig(cfg) - require.NoError(t, err) - client := httpclient.NewApiClient(clientCfg) - - _ = client.Do(t.Context(), "GET", "/api/2.0/clusters/list") - return capturedUA -} - -// TestSDKAgentDetection verifies the SDK adds agent/ to the User-Agent -// header when exactly one agent env var is set. -func TestSDKAgentDetection(t *testing.T) { - unsetAgentEnv(t) - useragent.ClearCache() - t.Cleanup(useragent.ClearCache) - - t.Setenv("CLAUDECODE", "1") - - ua := captureUserAgent(t) - assert.Contains(t, ua, "agent/claude-code") - assert.Equal(t, 1, strings.Count(ua, "agent/"), "expected exactly one agent/ segment") -} - -// TestSDKNoAgentDetected verifies no agent/ segment is added when no agent -// env vars are set. -func TestSDKNoAgentDetected(t *testing.T) { - unsetAgentEnv(t) - useragent.ClearCache() - t.Cleanup(useragent.ClearCache) - - ua := captureUserAgent(t) - assert.NotContains(t, ua, "agent/") -} - -// TestSDKMultipleAgentsSuppressed verifies no agent/ segment is added when -// multiple agent env vars are set (ambiguity guard). 
-func TestSDKMultipleAgentsSuppressed(t *testing.T) { - unsetAgentEnv(t) - useragent.ClearCache() - t.Cleanup(useragent.ClearCache) - - t.Setenv("CLAUDECODE", "1") - t.Setenv("CURSOR_AGENT", "1") - - ua := captureUserAgent(t) - assert.NotContains(t, ua, "agent/") -} - -type roundTripperFunc func(*http.Request) (*http.Response, error) - -func (f roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { - return f(r) -} diff --git a/internal/genkit/tagging.py b/internal/genkit/tagging.py old mode 100755 new mode 100644 From f21dc104d0edd6fd63e808d933cdf62df0dd3f8b Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 15:41:59 +0100 Subject: [PATCH 11/17] Fix auth error preservation and platform-specific DNS golden files Only synthesize ErrNotWorkspaceClient when the host type is actually wrong (AccountHost). For other auth errors like ErrCannotConfigureDefault, return the original error to preserve actionable error messages. Also add replacement rules for platform-specific DNS resolver details in auth switch/profiles acceptance tests (Linux includes "on 127.0.0.53:53", macOS does not). Co-authored-by: Isaac --- acceptance/cmd/auth/profiles/test.toml | 6 ++++++ acceptance/cmd/auth/switch/nominal/test.toml | 6 ++++++ cmd/root/auth.go | 15 ++++++++++----- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/acceptance/cmd/auth/profiles/test.toml b/acceptance/cmd/auth/profiles/test.toml index 36c0e7e237..ad8ec1f872 100644 --- a/acceptance/cmd/auth/profiles/test.toml +++ b/acceptance/cmd/auth/profiles/test.toml @@ -1,3 +1,9 @@ Ignore = [ "home" ] + +# Normalize platform-specific DNS error messages in host metadata warnings. +# Linux includes resolver address (e.g. "on 127.0.0.53:53"), macOS does not. 
+[[Repls]] +Old = 'dial tcp: lookup (\S+)( on \S+)?: no such host' +New = 'dial tcp: lookup $1: no such host' diff --git a/acceptance/cmd/auth/switch/nominal/test.toml b/acceptance/cmd/auth/switch/nominal/test.toml index 36c0e7e237..ad8ec1f872 100644 --- a/acceptance/cmd/auth/switch/nominal/test.toml +++ b/acceptance/cmd/auth/switch/nominal/test.toml @@ -1,3 +1,9 @@ Ignore = [ "home" ] + +# Normalize platform-specific DNS error messages in host metadata warnings. +# Linux includes resolver address (e.g. "on 127.0.0.53:53"), macOS does not. +[[Repls]] +Old = 'dial tcp: lookup (\S+)( on \S+)?: no such host' +New = 'dial tcp: lookup $1: no such host' diff --git a/cmd/root/auth.go b/cmd/root/auth.go index ff95967275..477db14337 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -193,17 +193,22 @@ func workspaceClientOrPrompt(ctx context.Context, cfg *config.Config, allowPromp // ErrNotWorkspaceClient from NewWorkspaceClient (as of v0.125.0, host-type // validation was removed in favor of host metadata resolution). Use // HostType() to detect wrong host type, and check for ErrCannotConfigureDefault. - needsPrompt := cfg.HostType() == config.AccountHost || - errors.Is(err, config.ErrCannotConfigureDefault) + wrongHostType := cfg.HostType() == config.AccountHost + needsPrompt := wrongHostType || errors.Is(err, config.ErrCannotConfigureDefault) if !needsPrompt { return w, err } if !allowPrompt || !cmdio.IsPromptSupported(ctx) { - // Synthesize ErrNotWorkspaceClient so callers (like MustAnyClient) - // can detect the wrong config type and fall through to account client. - return w, databricks.ErrNotWorkspaceClient + // Only synthesize ErrNotWorkspaceClient for wrong host type so that + // callers like MustAnyClient can fall through to account client. + // For other errors (e.g. ErrCannotConfigureDefault), return the + // original error to preserve actionable error messages. 
+ if wrongHostType { + return w, databricks.ErrNotWorkspaceClient + } + return w, err } // Try picking a profile dynamically if the current configuration is not valid. From 3075fda601002c33c1f76315d0354eb46f989b0f Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 16:00:22 +0100 Subject: [PATCH 12/17] Fix telemetry acceptance tests and pydabs check-formatting The .well-known/databricks-config request (added by the SDK bump) became the first recorded request in out.requests.txt. Since it lacks a cmd-exec-id in User-Agent, extract_command_exec_id.py failed on its first JSON object. Fix the script to iterate through all objects until finding one with cmd-exec-id. Also add a Repls pattern for compact JSON execution_time_ms (no space after colon, as it appears in protoLogs strings) and regenerate all affected golden files. For pydabs/check-formatting, regenerate with ruff installed so the output reflects actual formatting checks. Co-authored-by: Isaac --- acceptance/bin/extract_command_exec_id.py | 25 +- .../telemetry/deploy-error/out.requests.txt | 34 - .../telemetry/deploy-error/out.telemetry.txt | 2 +- .../bundle/telemetry/deploy-error/output.txt | 3 - .../bundle/telemetry/deploy/out.requests.txt | 1269 ----------------- .../bundle/telemetry/deploy/out.telemetry.txt | 6 +- acceptance/bundle/telemetry/deploy/output.txt | 3 - acceptance/bundle/telemetry/test.toml | 5 + .../pydabs/check-formatting/output.txt | 6 +- .../custom-template/out.databricks.yml | 2 +- .../custom-template/out.requests.txt | 4 +- .../telemetry/custom-template/output.txt | 22 +- .../telemetry/dbt-sql/out.databricks.yml | 2 +- .../telemetry/dbt-sql/out.requests.txt | 8 +- .../templates/telemetry/dbt-sql/output.txt | 32 +- .../default-python/out.databricks.yml | 2 +- .../telemetry/default-python/out.requests.txt | 8 +- .../telemetry/default-python/output.txt | 48 +- .../telemetry/default-sql/out.databricks.yml | 2 +- .../telemetry/default-sql/out.requests.txt | 6 +- 
.../telemetry/default-sql/output.txt | 28 +- 21 files changed, 163 insertions(+), 1354 deletions(-) delete mode 100644 acceptance/bundle/telemetry/deploy-error/out.requests.txt delete mode 100644 acceptance/bundle/telemetry/deploy/out.requests.txt diff --git a/acceptance/bin/extract_command_exec_id.py b/acceptance/bin/extract_command_exec_id.py index a9eb981110..781c3e7247 100755 --- a/acceptance/bin/extract_command_exec_id.py +++ b/acceptance/bin/extract_command_exec_id.py @@ -8,34 +8,31 @@ def extract_cmd_exec_id(): requests_file = Path("out.requests.txt") - # Read until we find a complete JSON object. This is required because we pretty - # print the JSON object (with new lines) in the out.requests.txt file. + # Read JSON objects one at a time and find the first one with a cmd-exec-id + # in the User-Agent header. Some requests (e.g. .well-known/databricks-config) + # are made before the command execution context is set and lack cmd-exec-id. with requests_file.open("r") as f: json_str = "" while True: line = f.readline() if not line: - raise SystemExit("Requests file is empty") + break json_str += line try: - # Try to parse the accumulated string as JSON data = json.loads(json_str) - break except json.JSONDecodeError: - # If incomplete, continue reading continue - user_agent = data["headers"]["User-Agent"][0] - - if not user_agent: - raise SystemExit("User-Agent header is empty") + # Reset for next JSON object + json_str = "" - match = re.search(r"cmd-exec-id/([^\s]+)", user_agent) - if match: - return match.group(1) + user_agent = data.get("headers", {}).get("User-Agent", [""])[0] + match = re.search(r"cmd-exec-id/([^\s]+)", user_agent) + if match: + return match.group(1) - raise SystemExit(f"No command execution ID found in User-Agent: {user_agent}") + raise SystemExit("No command execution ID found in any request in out.requests.txt") if __name__ == "__main__": diff --git a/acceptance/bundle/telemetry/deploy-error/out.requests.txt 
b/acceptance/bundle/telemetry/deploy-error/out.requests.txt deleted file mode 100644 index ac8a202b5b..0000000000 --- a/acceptance/bundle/telemetry/deploy-error/out.requests.txt +++ /dev/null @@ -1,34 +0,0 @@ -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" - ] - }, - "method": "GET", - "path": "/.well-known/databricks-config" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/preview/scim/v2/Me" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" - ] - }, - "method": "POST", - "path": "/telemetry-ext", - "body": { - "uploadTime": [UNIX_TIME_MILLIS], - "items": [], - "protoLogs": [ - "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":4,\"exit_code\":1},\"bundle_deploy_event\":{\"resource_count\":0,\"resource_job_count\":0,\"resource_pipeline_count\":0,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0}}}}" - ] - } -} diff --git a/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt b/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt index 284c95969a..b3eff6a16f 100644 --- a/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt +++ b/acceptance/bundle/telemetry/deploy-error/out.telemetry.txt @@ -1,6 +1,6 @@ { "execution_context": { - 
"cmd_exec_id": "[UUID]", + "cmd_exec_id": "[CMD-EXEC-ID]", "version": "[DEV_VERSION]", "command": "bundle_deploy", "operating_system": "[OS]", diff --git a/acceptance/bundle/telemetry/deploy-error/output.txt b/acceptance/bundle/telemetry/deploy-error/output.txt index 0269af91b6..5728b1d45d 100644 --- a/acceptance/bundle/telemetry/deploy-error/output.txt +++ b/acceptance/bundle/telemetry/deploy-error/output.txt @@ -6,6 +6,3 @@ Error: unable to define default workspace root: bundle name not defined Exit code: 1 >>> cat out.requests.txt -No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] - -Exit code: 1 diff --git a/acceptance/bundle/telemetry/deploy/out.requests.txt b/acceptance/bundle/telemetry/deploy/out.requests.txt deleted file mode 100644 index 1f8a3a06e0..0000000000 --- a/acceptance/bundle/telemetry/deploy/out.requests.txt +++ /dev/null @@ -1,1269 +0,0 @@ -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]" - ] - }, - "method": "GET", - "path": "/.well-known/databricks-config" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/preview/scim/v2/Me" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate", - "return_export_info": "true" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat" - ] - }, - "method": "GET", - "path": 
"/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json", - "return_export_info": "true" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json", - "return_export_info": "true" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "q": { - "overwrite": "false" - }, - "body": { - "ID": "[UUID]", - "AcquisitionTime": "[TIMESTAMP]", - "IsForced": false, - "User": "[USERNAME]" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "return_export_info": "true" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/export", - "q": { - "direct_download": "true", - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy 
cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace/delete", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal", - "recursive": true - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace/mkdirs", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace/mkdirs", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": 
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/script", - "q": { - "overwrite": "true" - }, - "raw_body": "errcode() {\n # Temporarily disable 'set -e' to prevent the script from exiting on error\n set +e\n # Execute the provided command with all arguments\n \"$@\"\n local exit_code=$?\n # Re-enable 'set -e' if it was previously set\n set -e\n if [ $exit_code -ne 0 ]; then\n \u003e\u00262 printf \"\\nExit code: $exit_code\\n\"\n fi\n}\n\nmusterr() {\n # Temporarily disable 'set -e' to prevent the script from exiting on error\n set +e\n # Execute the provided command with all arguments\n \"$@\"\n local exit_code=$?\n # Re-enable 'set -e'\n set -e\n if [ $exit_code -eq 0 ]; then\n \u003e\u00262 printf \"\\nUnexpected success\\n\"\n exit 1\n fi\n}\n\ntrace() {\n \u003e\u00262 printf \"\\n\u003e\u003e\u003e %s\\n\" \"$*\"\n\n if [[ \"$1\" == *\"=\"* ]]; then\n # If the first argument contains '=', collect all env vars\n local env_vars=()\n while [[ \"$1\" == *\"=\"* ]]; do\n env_vars+=(\"$1\")\n shift\n done\n # Export environment variables in a subshell and execute the command\n (\n export \"${env_vars[@]}\"\n \"$@\"\n )\n else\n # Execute the command normally\n \"$@\"\n fi\n\n return $?\n}\n\ngit-repo-init() {\n git init -qb main\n git config core.autocrlf false\n git config user.name \"Tester\"\n git config user.email \"[USERNAME]\"\n git config core.hooksPath no-hooks\n git add databricks.yml\n git commit -qm 'Add databricks.yml'\n}\n\ntitle() {\n local label=\"$1\"\n printf \"\\n=== %b\" \"$label\"\n}\n\nwithdir() {\n local dir=\"$1\"\n shift\n local orig_dir=\"$(pwd)\"\n cd \"$dir\" || return $?\n \"$@\"\n local exit_code=$?\n cd \"$orig_dir\" || return $?\n return $exit_code\n}\n\nuuid() {\n python3 -c 'import uuid; print(uuid.uuid4())'\n}\n\nvenv_activate() {\n if [[ \"$OSTYPE\" == \"msys\" || \"$OSTYPE\" == \"cygwin\" || \"$OSTYPE\" == \"win32\" ]]; then\n source .venv/Scripts/activate\n else\n source 
.venv/bin/activate\n fi\n}\n\nenvsubst() {\n # We need to disable MSYS_NO_PATHCONV when running the python script.\n # This is because the python interpreter is otherwise unable to find the python script\n # when MSYS_NO_PATHCONV is enabled.\n env -u MSYS_NO_PATHCONV envsubst.py\n}\n\nprint_telemetry_bool_values() {\n jq -r 'select(.path? == \"/telemetry-ext\") | (.body.protoLogs // [])[] | fromjson | ( (.entry // .) | (.databricks_cli_log.bundle_deploy_event.experimental.bool_values // []) ) | map(\"\\(.key) \\(.value)\") | .[]' out.requests.txt | sort\n}\n\nsethome() {\n local home=\"$1\"\n mkdir -p \"$home\"\n\n # For macOS and Linux, use HOME.\n export HOME=\"$home\"\n\n # For Windows, use USERPROFILE.\n export USERPROFILE=\"$home\"\n}\n\nas-test-sp() {\n if [[ -z \"$TEST_SP_TOKEN\" ]]; then\n echo \"Error: TEST_SP_TOKEN is not set.\" \u003e\u00262\n return 1\n fi\n\n DATABRICKS_TOKEN=\"$TEST_SP_TOKEN\" \\\n DATABRICKS_CLIENT_SECRET=\"\" \\\n DATABRICKS_CLIENT_ID=\"\" \\\n DATABRICKS_AUTH_TYPE=\"\" \\\n \"$@\"\n}\n\nreadplanarg() {\n # Expands into \"--plan \u003cfilename\u003e\" based on READPLAN env var\n # Use it with \"bundle deploy\" to configure two runs: once with saved plan and one without.\n # Note: READPLAN is specially handled in test runner so that engine=terraform/readplan is set combination is skipped.\n if [[ -n \"$READPLAN\" ]]; then\n printf -- \"--plan %s\" \"$1\"\n else\n printf \"\"\n fi\n}\n\n(\ntrace $CLI bundle deploy\n\ntrace cat out.requests.txt | jq 'select(has(\"path\") and .path == \"/telemetry-ext\") | .body.protoLogs[] | fromjson' \u003e telemetry.json\n\n# Assert that the telemetry mechanism is working and recording mutator execution times.\n# We only check that at least one mutator execution time is recorded to avoid flakiness on fast machines\n# where many mutators may complete in less than 1ms (the threshold for recording in bundle/mutator.go).\ntitle \"Assert that mutator execution times are being recorded\"\ntrace cat 
telemetry.json | jq ' .entry.databricks_cli_log.bundle_deploy_event.experimental.bundle_mutator_execution_time_ms | length \u003e 0'\n\n# bundle_mutator_execution_time_ms can have variable number of entries depending upon the runtime of the mutators. Thus we omit it from\n# being asserted here.\ncat telemetry.json | jq 'del(.entry.databricks_cli_log.bundle_deploy_event.experimental.bundle_mutator_execution_time_ms)' \u003e out.telemetry.txt\n\ncmd_exec_id=$(extract_command_exec_id.py)\ndeployment_id=$(cat .databricks/bundle/default/deployment.json | jq -r .id)\n\nupdate_file.py out.telemetry.txt $cmd_exec_id '[CMD-EXEC-ID]'\nupdate_file.py out.telemetry.txt \"[UUID]\" '[BUNDLE_UUID]'\nupdate_file.py out.telemetry.txt $deployment_id '[DEPLOYMENT_ID]'\n\nrm out.requests.txt\nrm telemetry.json\n)\n\nrm -fr .databricks .gitignore\n" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/repls.json", - "q": { - "overwrite": "true" - }, - "body": [ - { - "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/\\.terraformrc", - "New": "[DATABRICKS_TF_CLI_CONFIG_FILE]", - "Order": 0, - "Distinct": false - }, - { - "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/terraform", - "New": "[TERRAFORM]", - "Order": 0, - "Distinct": false - }, - { - "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/databricks_bundles-0\\.295\\.0-py3-none-any\\.whl", - "New": "[DATABRICKS_BUNDLES_WHEEL]", - "Order": 0, - "Distinct": false - }, - { - "Old": 
"/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/databricks", - "New": "[CLI]", - "Order": 0, - "Distinct": false - }, - { - "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64/0\\.293\\.0/databricks", - "New": "[CLI_293]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[TEST_DEFAULT_WAREHOUSE_ID]", - "New": "[TEST_DEFAULT_WAREHOUSE_ID]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[TEST_DEFAULT_CLUSTER_ID]", - "New": "[TEST_DEFAULT_CLUSTER_ID]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[TEST_INSTANCE_POOL_ID]", - "New": "[TEST_INSTANCE_POOL_ID]", - "Order": 0, - "Distinct": false - }, - { - "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance/build/[OS]_arm64", - "New": "[BUILD_DIR]", - "Order": 0, - "Distinct": false - }, - { - "Old": "0\\.0\\.0-dev(\\+[a-f0-9]{10,16})?", - "New": "[DEV_VERSION]", - "Order": 0, - "Distinct": false - }, - { - "Old": "databricks-sdk-go/[0-9]+\\.[0-9]+\\.[0-9]+", - "New": "databricks-sdk-go/[SDK_VERSION]", - "Order": 0, - "Distinct": false - }, - { - "Old": "1\\.25\\.7", - "New": "[GO_VERSION]", - "Order": 0, - "Distinct": false - }, - { - "Old": "/Users/simon\\.faltum/workdir/cli/\\.claude/worktrees/sdk-agent-cleanup/acceptance", - "New": "[TESTROOT]", - "Order": 0, - "Distinct": false - }, - { - "Old": "dbapi[0-9a-f]+", - "New": "[DATABRICKS_TOKEN]", - "Order": 0, - "Distinct": false - }, - { - "Old": "i3\\.xlarge", - "New": "[NODE_TYPE_ID]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[UNIQUE_NAME]", - "New": "[UNIQUE_NAME]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[TEST_TMP_DIR]", - "New": "[TEST_TMP_DIR]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[TEST_TMP_DIR]", - "New": "[TEST_TMP_DIR]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[TEST_TMP_DIR]_PARENT", - "New": "[TEST_TMP_DIR]_PARENT", - "Order": 0, - 
"Distinct": false - }, - { - "Old": "[TEST_TMP_DIR]_PARENT", - "New": "[TEST_TMP_DIR]_PARENT", - "Order": 0, - "Distinct": false - }, - { - "Old": "[USERNAME]@databricks\\.com", - "New": "[USERNAME]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[USERNAME]", - "New": "[USERNAME]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[USERID]", - "New": "[USERID]", - "Order": 0, - "Distinct": false - }, - { - "Old": "https://127\\.0\\.0\\.1:52538", - "New": "[DATABRICKS_URL]", - "Order": 0, - "Distinct": false - }, - { - "Old": "http://127\\.0\\.0\\.1:52538", - "New": "[DATABRICKS_URL]", - "Order": 0, - "Distinct": false - }, - { - "Old": "127\\.0\\.0\\.1:52538", - "New": "[DATABRICKS_HOST]", - "Order": 0, - "Distinct": false - }, - { - "Old": "[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}", - "New": "[UUID]", - "Order": 0, - "Distinct": false - }, - { - "Old": "\\d{20,}", - "New": "[NUMID]", - "Order": 10, - "Distinct": false - }, - { - "Old": "1[78]\\d{17}", - "New": "[UNIX_TIME_NANOS]", - "Order": 10, - "Distinct": true - }, - { - "Old": "\\d{17,}", - "New": "[NUMID]", - "Order": 10, - "Distinct": false - }, - { - "Old": "\\d{14,}", - "New": "[NUMID]", - "Order": 10, - "Distinct": false - }, - { - "Old": "1[78]\\d{11}", - "New": "[UNIX_TIME_MILLIS]", - "Order": 10, - "Distinct": true - }, - { - "Old": "\\d{11,}", - "New": "[NUMID]", - "Order": 10, - "Distinct": false - }, - { - "Old": "1[78]\\d{8}", - "New": "[UNIX_TIME_S]", - "Order": 10, - "Distinct": false - }, - { - "Old": "\\d{8,}", - "New": "[NUMID]", - "Order": 10, - "Distinct": false - }, - { - "Old": "2\\d\\d\\d-\\d\\d-\\d\\d(T| )\\d\\d:\\d\\d:\\d\\d\\.\\d+(Z|\\+\\d\\d:\\d\\d)?", - "New": "[TIMESTAMP]", - "Order": 9, - "Distinct": false - }, - { - "Old": "2\\d\\d\\d-\\d\\d-\\d\\d(T| )\\d\\d:\\d\\d:\\d\\dZ?", - "New": "[TIMESTAMP]", - "Order": 9, - "Distinct": false - }, - { - "Old": "os/[OS]", - "New": "os/[OS]", - "Order": 0, - "Distinct": false - }, - { - "Old": 
"os/[OS]", - "New": "os/[OS]", - "Order": 0, - "Distinct": false - }, - { - "Old": "os/[OS]", - "New": "os/[OS]", - "Order": 0, - "Distinct": false - }, - { - "Old": "", - "New": "", - "Order": 0, - "Distinct": false - }, - { - "Old": "\"execution_time_ms\": \\d{1,5},", - "New": "\"execution_time_ms\": SMALL_INT,", - "Order": 0, - "Distinct": false - }, - { - "Old": "([OS]|[OS]|[OS])", - "New": "[OS]", - "Order": 0, - "Distinct": false - }, - { - "Old": "\"local_cache_measurements_ms\": \\[[^\\]]*\\]", - "New": "\"local_cache_measurements_ms\": [...redacted...]", - "Order": 0, - "Distinct": false - } - ] -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/output.txt", - "q": { - "overwrite": "true" - }, - "raw_body": "\n\u003e\u003e\u003e [CLI] bundle deploy\nUploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...\n" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/databricks.yml", - "q": { - "overwrite": "true" - }, - "raw_body": "bundle:\n name: test-bundle\n uuid: [UUID]\n\nresources:\n jobs:\n job_one:\n name: job one\n job_two:\n name: job two\n job_three:\n name: job three\n\n pipelines:\n pipeline_one:\n name: pipeline one\n pipeline_two:\n name: pipeline two\n" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - 
"method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/out.requests.txt", - "q": { - "overwrite": "true" - }, - "raw_body": "{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/.well-known/databricks-config\"\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/preview/scim/v2/Me\"\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/resources.json\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] 
databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\",\n \"q\": {\n \"overwrite\": \"false\"\n },\n \"body\": {\n \"ID\": \"[UUID]\",\n \"AcquisitionTime\": \"[TIMESTAMP]\",\n \"IsForced\": false,\n \"User\": \"[USERNAME]\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\",\n \"return_export_info\": \"true\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/export\",\n \"q\": {\n \"direct_download\": \"true\",\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace/delete\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal\",\n \"recursive\": true\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": 
\"/api/2.0/workspace/mkdirs\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/artifacts/.internal\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"POST\",\n \"path\": \"/api/2.0/workspace/mkdirs\",\n \"body\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n{\n \"headers\": {\n \"User-Agent\": [\n \"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat\"\n ]\n },\n \"method\": \"GET\",\n \"path\": \"/api/2.0/workspace/get-status\",\n \"q\": {\n \"path\": \"/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files\"\n }\n}\n" -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/test.toml", - "q": { - "overwrite": "true" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": 
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deployment.json", - "q": { - "overwrite": "true" - }, - "body": { - "version": 1, - "seq": 1, - "cli_version": "[DEV_VERSION]", - "timestamp": "[TIMESTAMP]", - "files": [ - { - "local_path": "script", - "is_notebook": false - }, - { - "local_path": "test.toml", - "is_notebook": false - }, - { - "local_path": "databricks.yml", - "is_notebook": false - }, - { - "local_path": "out.requests.txt", - "is_notebook": false - }, - { - "local_path": "output.txt", - "is_notebook": false - }, - { - "local_path": "repls.json", - "is_notebook": false - } - ], - "id": "[UUID]" - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/pipelines", - "body": { - "channel": "CURRENT", - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edition": "ADVANCED", - "name": "pipeline two" - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/pipelines", - "body": { - "channel": "CURRENT", - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edition": "ADVANCED", - "name": "pipeline one" - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/pipelines/[UUID]" -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 
databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/pipelines/[UUID]" -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/pipelines/[UUID]" -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/pipeline auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/pipelines/[UUID]" -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.2/jobs/create", - "body": { - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edit_mode": "UI_LOCKED", - "format": "MULTI_TASK", - "max_concurrent_runs": 1, - "name": "job one", - "queue": { - "enabled": true - } - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.2/jobs/get", - "q": { - "job_id": "[NUMID]" - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.2/jobs/create", - "body": { - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edit_mode": "UI_LOCKED", - "format": 
"MULTI_TASK", - "max_concurrent_runs": 1, - "name": "job two", - "queue": { - "enabled": true - } - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.2/jobs/get", - "q": { - "job_id": "[NUMID]" - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.2/jobs/create", - "body": { - "deployment": { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - }, - "edit_mode": "UI_LOCKED", - "format": "MULTI_TASK", - "max_concurrent_runs": 1, - "name": "job three", - "queue": { - "enabled": true - } - } -} -{ - "headers": { - "User-Agent": [ - "databricks-tf-provider/1.111.0 databricks-sdk-go/[SDK_VERSION] go/1.24.0 os/[OS] cli/[DEV_VERSION] terraform/1.5.5 sdk/sdkv2 resource/job auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.2/jobs/get", - "q": { - "job_id": "[NUMID]" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/terraform.tfstate", - "q": { - "overwrite": "true" - }, - "body": { - "version": 4, - "terraform_version": "1.5.5", - "serial": 6, - "lineage": "[UUID]", - "outputs": {}, - "resources": [ - { - "mode": "managed", - "type": "databricks_job", - "name": "job_one", - "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", - "instances": [ - { - "schema_version": 2, - "attributes": { - "always_running": false, - 
"budget_policy_id": null, - "continuous": [], - "control_run_state": false, - "dbt_task": [], - "deployment": [ - { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - } - ], - "description": null, - "edit_mode": "UI_LOCKED", - "email_notifications": [ - { - "no_alert_for_skipped_runs": false, - "on_duration_warning_threshold_exceeded": [], - "on_failure": [], - "on_start": [], - "on_streaming_backlog_exceeded": [], - "on_success": [] - } - ], - "environment": [], - "existing_cluster_id": null, - "format": "MULTI_TASK", - "git_source": [], - "health": [], - "id": "[NUMID]", - "job_cluster": [], - "library": [], - "max_concurrent_runs": 1, - "max_retries": 0, - "min_retry_interval_millis": 0, - "name": "job one", - "new_cluster": [], - "notebook_task": [], - "notification_settings": [], - "parameter": [], - "performance_target": null, - "pipeline_task": [], - "provider_config": [], - "python_wheel_task": [], - "queue": [ - { - "enabled": true - } - ], - "retry_on_timeout": false, - "run_as": [ - { - "group_name": "", - "service_principal_name": "", - "user_name": "[USERNAME]" - } - ], - "run_job_task": [], - "schedule": [], - "spark_jar_task": [], - "spark_python_task": [], - "spark_submit_task": [], - "tags": null, - "task": [], - "timeout_seconds": 0, - "timeouts": null, - "trigger": [], - "url": "[DATABRICKS_URL]/#job/[NUMID]", - "usage_policy_id": null, - "webhook_notifications": [ - { - "on_duration_warning_threshold_exceeded": [], - "on_failure": [], - "on_start": [], - "on_streaming_backlog_exceeded": [], - "on_success": [] - } - ] - }, - "sensitive_attributes": [], - "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxODAwMDAwMDAwMDAwLCJ1cGRhdGUiOjE4MDAwMDAwMDAwMDB9LCJzY2hlbWFfdmVyc2lvbiI6IjIifQ==" - } - ] - }, - { - "mode": "managed", - "type": "databricks_job", - "name": "job_three", - "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", 
- "instances": [ - { - "schema_version": 2, - "attributes": { - "always_running": false, - "budget_policy_id": null, - "continuous": [], - "control_run_state": false, - "dbt_task": [], - "deployment": [ - { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - } - ], - "description": null, - "edit_mode": "UI_LOCKED", - "email_notifications": [ - { - "no_alert_for_skipped_runs": false, - "on_duration_warning_threshold_exceeded": [], - "on_failure": [], - "on_start": [], - "on_streaming_backlog_exceeded": [], - "on_success": [] - } - ], - "environment": [], - "existing_cluster_id": null, - "format": "MULTI_TASK", - "git_source": [], - "health": [], - "id": "[NUMID]", - "job_cluster": [], - "library": [], - "max_concurrent_runs": 1, - "max_retries": 0, - "min_retry_interval_millis": 0, - "name": "job three", - "new_cluster": [], - "notebook_task": [], - "notification_settings": [], - "parameter": [], - "performance_target": null, - "pipeline_task": [], - "provider_config": [], - "python_wheel_task": [], - "queue": [ - { - "enabled": true - } - ], - "retry_on_timeout": false, - "run_as": [ - { - "group_name": "", - "service_principal_name": "", - "user_name": "[USERNAME]" - } - ], - "run_job_task": [], - "schedule": [], - "spark_jar_task": [], - "spark_python_task": [], - "spark_submit_task": [], - "tags": null, - "task": [], - "timeout_seconds": 0, - "timeouts": null, - "trigger": [], - "url": "[DATABRICKS_URL]/#job/[NUMID]", - "usage_policy_id": null, - "webhook_notifications": [ - { - "on_duration_warning_threshold_exceeded": [], - "on_failure": [], - "on_start": [], - "on_streaming_backlog_exceeded": [], - "on_success": [] - } - ] - }, - "sensitive_attributes": [], - "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxODAwMDAwMDAwMDAwLCJ1cGRhdGUiOjE4MDAwMDAwMDAwMDB9LCJzY2hlbWFfdmVyc2lvbiI6IjIifQ==" - } - ] - }, - { - "mode": "managed", - "type": "databricks_job", - 
"name": "job_two", - "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", - "instances": [ - { - "schema_version": 2, - "attributes": { - "always_running": false, - "budget_policy_id": null, - "continuous": [], - "control_run_state": false, - "dbt_task": [], - "deployment": [ - { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - } - ], - "description": null, - "edit_mode": "UI_LOCKED", - "email_notifications": [ - { - "no_alert_for_skipped_runs": false, - "on_duration_warning_threshold_exceeded": [], - "on_failure": [], - "on_start": [], - "on_streaming_backlog_exceeded": [], - "on_success": [] - } - ], - "environment": [], - "existing_cluster_id": null, - "format": "MULTI_TASK", - "git_source": [], - "health": [], - "id": "[NUMID]", - "job_cluster": [], - "library": [], - "max_concurrent_runs": 1, - "max_retries": 0, - "min_retry_interval_millis": 0, - "name": "job two", - "new_cluster": [], - "notebook_task": [], - "notification_settings": [], - "parameter": [], - "performance_target": null, - "pipeline_task": [], - "provider_config": [], - "python_wheel_task": [], - "queue": [ - { - "enabled": true - } - ], - "retry_on_timeout": false, - "run_as": [ - { - "group_name": "", - "service_principal_name": "", - "user_name": "[USERNAME]" - } - ], - "run_job_task": [], - "schedule": [], - "spark_jar_task": [], - "spark_python_task": [], - "spark_submit_task": [], - "tags": null, - "task": [], - "timeout_seconds": 0, - "timeouts": null, - "trigger": [], - "url": "[DATABRICKS_URL]/#job/[NUMID]", - "usage_policy_id": null, - "webhook_notifications": [ - { - "on_duration_warning_threshold_exceeded": [], - "on_failure": [], - "on_start": [], - "on_streaming_backlog_exceeded": [], - "on_success": [] - } - ] - }, - "sensitive_attributes": [], - "private": 
"eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxODAwMDAwMDAwMDAwLCJ1cGRhdGUiOjE4MDAwMDAwMDAwMDB9LCJzY2hlbWFfdmVyc2lvbiI6IjIifQ==" - } - ] - }, - { - "mode": "managed", - "type": "databricks_pipeline", - "name": "pipeline_one", - "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", - "instances": [ - { - "schema_version": 0, - "attributes": { - "allow_duplicate_names": false, - "budget_policy_id": null, - "catalog": null, - "cause": null, - "channel": "CURRENT", - "cluster": [], - "cluster_id": null, - "configuration": null, - "continuous": false, - "creator_user_name": "[USERNAME]", - "deployment": [ - { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - } - ], - "development": false, - "edition": "ADVANCED", - "environment": [], - "event_log": [], - "expected_last_modified": 0, - "filters": [], - "gateway_definition": [], - "health": null, - "id": "[UUID]", - "ingestion_definition": [], - "last_modified": [UNIX_TIME_MILLIS][0], - "latest_updates": null, - "library": [], - "name": "pipeline one", - "notification": [], - "photon": false, - "provider_config": [], - "restart_window": [], - "root_path": null, - "run_as": [], - "run_as_user_name": "[USERNAME]", - "schema": null, - "serverless": false, - "state": "IDLE", - "storage": "dbfs:/pipelines/[UUID]", - "tags": null, - "target": null, - "timeouts": null, - "trigger": [], - "url": "[DATABRICKS_URL]/#joblist/pipelines/[UUID]", - "usage_policy_id": null - }, - "sensitive_attributes": [], - "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxMjAwMDAwMDAwMDAwLCJkZWZhdWx0IjoxMjAwMDAwMDAwMDAwLCJkZWxldGUiOjEyMDAwMDAwMDAwMDAsInJlYWQiOjEyMDAwMDAwMDAwMDAsInVwZGF0ZSI6MTIwMDAwMDAwMDAwMH19" - } - ] - }, - { - "mode": "managed", - "type": "databricks_pipeline", - "name": "pipeline_two", - "provider": "provider[\"registry.terraform.io/databricks/databricks\"]", - "instances": [ - { - 
"schema_version": 0, - "attributes": { - "allow_duplicate_names": false, - "budget_policy_id": null, - "catalog": null, - "cause": null, - "channel": "CURRENT", - "cluster": [], - "cluster_id": null, - "configuration": null, - "continuous": false, - "creator_user_name": "[USERNAME]", - "deployment": [ - { - "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json" - } - ], - "development": false, - "edition": "ADVANCED", - "environment": [], - "event_log": [], - "expected_last_modified": 0, - "filters": [], - "gateway_definition": [], - "health": null, - "id": "[UUID]", - "ingestion_definition": [], - "last_modified": [UNIX_TIME_MILLIS][1], - "latest_updates": null, - "library": [], - "name": "pipeline two", - "notification": [], - "photon": false, - "provider_config": [], - "restart_window": [], - "root_path": null, - "run_as": [], - "run_as_user_name": "[USERNAME]", - "schema": null, - "serverless": false, - "state": "IDLE", - "storage": "dbfs:/pipelines/[UUID]", - "tags": null, - "target": null, - "timeouts": null, - "trigger": [], - "url": "[DATABRICKS_URL]/#joblist/pipelines/[UUID]", - "usage_policy_id": null - }, - "sensitive_attributes": [], - "private": "eyJlMmJmYjczMC1lY2FhLTExZTYtOGY4OC0zNDM2M2JjN2M0YzAiOnsiY3JlYXRlIjoxMjAwMDAwMDAwMDAwLCJkZWZhdWx0IjoxMjAwMDAwMDAwMDAwLCJkZWxldGUiOjEyMDAwMDAwMDAwMDAsInJlYWQiOjEyMDAwMDAwMDAwMDAsInVwZGF0ZSI6MTIwMDAwMDAwMDAwMH19" - } - ] - } - ], - "check_results": null - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json", - "q": { - "overwrite": "true" - }, - "body": { - "version": 1, - "config": { - "bundle": { - "name": "test-bundle", - "target": "default", - "git": { 
- "bundle_root_path": "." - } - }, - "workspace": { - "file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files" - }, - "resources": { - "jobs": { - "job_one": { - "id": "[NUMID]", - "relative_path": "databricks.yml" - }, - "job_three": { - "id": "[NUMID]", - "relative_path": "databricks.yml" - }, - "job_two": { - "id": "[NUMID]", - "relative_path": "databricks.yml" - } - }, - "pipelines": { - "pipeline_one": { - "id": "[UUID]", - "relative_path": "databricks.yml" - }, - "pipeline_two": { - "id": "[UUID]", - "relative_path": "databricks.yml" - } - } - }, - "presets": { - "source_linked_deployment": false - } - }, - "extra": {} - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "return_export_info": "true" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/get-status", - "q": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock", - "return_export_info": "true" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "GET", - "path": "/api/2.0/workspace/export", - "q": { - "direct_download": "true", - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] 
interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/api/2.0/workspace/delete", - "body": { - "path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/deploy.lock" - } -} -{ - "headers": { - "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_deploy cmd-exec-id/[UUID] interactive/none engine/terraform auth/pat" - ] - }, - "method": "POST", - "path": "/telemetry-ext", - "body": { - "uploadTime": [UNIX_TIME_MILLIS][2], - "items": [], - "protoLogs": [ - "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_deploy\",\"operating_system\":\"[OS]\",\"execution_time_ms\":2367,\"exit_code\":0},\"bundle_deploy_event\":{\"bundle_uuid\":\"[UUID]\",\"deployment_id\":\"[UUID]\",\"resource_count\":5,\"resource_job_count\":3,\"resource_pipeline_count\":2,\"resource_model_count\":0,\"resource_experiment_count\":0,\"resource_model_serving_endpoint_count\":0,\"resource_registered_model_count\":0,\"resource_quality_monitor_count\":0,\"resource_schema_count\":0,\"resource_volume_count\":0,\"resource_cluster_count\":0,\"resource_dashboard_count\":0,\"resource_app_count\":0,\"resource_job_ids\":[\"[NUMID]\",\"[NUMID]\",\"[NUMID]\"],\"resource_pipeline_ids\":[\"[UUID]\",\"[UUID]\"],\"experimental\":{\"configuration_file_count\":1,\"variable_count\":0,\"complex_variable_count\":0,\"lookup_variable_count\":0,\"target_count\":1,\"bool_values\":[{\"key\":\"local.cache.attempt\",\"value\":true},{\"key\":\"local.cache.miss\",\"value\":true},{\"key\":\"experimental.use_legacy_run_as\",\"value\":false},{\"key\":\"run_as_set\",\"value\":false},{\"key\":\"presets_name_prefix_is_set\",\"value\":false},{\"key\":\"python_wheel_wrapper_is_set\",\"value\":false},{\"key\":\"skip_artifact_cleanup\",\"value\":false},{\"key\":\"has_serverless_compute\",\"value\":false},{\"key\":\"has_classic_j
ob_compute\",\"value\":false},{\"key\":\"has_classic_interactive_compute\",\"value\":false}],\"bundle_mode\":\"TYPE_UNSPECIFIED\",\"workspace_artifact_path_type\":\"WORKSPACE_FILE_SYSTEM\",\"bundle_mutator_execution_time_ms\":[{\"key\":\"terraform.(plan)\",\"value\":1310},{\"key\":\"terraform.(apply)\",\"value\":732},{\"key\":\"phases.Initialize\",\"value\":15},{\"key\":\"files.(upload)\",\"value\":7},{\"key\":\"resourcemutator.(processStaticResources)\",\"value\":5},{\"key\":\"mutator.(initializeCache)\",\"value\":2},{\"key\":\"mutator.(populateCurrentUser)\",\"value\":2},{\"key\":\"phases.Build\",\"value\":2},{\"key\":\"lock.(acquire)\",\"value\":2},{\"key\":\"artifacts.(build)\",\"value\":1},{\"key\":\"terraform.(interpolateMutator)\",\"value\":1},{\"key\":\"validate.FastValidate\",\"value\":0}],\"local_cache_measurements_ms\":[{\"key\":\"local.cache.compute_duration\",\"value\":0}]}}}}}" - ] - } -} diff --git a/acceptance/bundle/telemetry/deploy/out.telemetry.txt b/acceptance/bundle/telemetry/deploy/out.telemetry.txt index d13c1c4ddc..f945233dd1 100644 --- a/acceptance/bundle/telemetry/deploy/out.telemetry.txt +++ b/acceptance/bundle/telemetry/deploy/out.telemetry.txt @@ -3,7 +3,7 @@ "entry": { "databricks_cli_log": { "execution_context": { - "cmd_exec_id": "[UUID]", + "cmd_exec_id": "[CMD-EXEC-ID]", "version": "[DEV_VERSION]", "command": "bundle_deploy", "operating_system": "[OS]", @@ -11,8 +11,8 @@ "exit_code": 0 }, "bundle_deploy_event": { - "bundle_uuid": "[UUID]", - "deployment_id": "[UUID]", + "bundle_uuid": "[BUNDLE_UUID]", + "deployment_id": "[DEPLOYMENT_ID]", "resource_count": 5, "resource_job_count": 3, "resource_pipeline_count": 2, diff --git a/acceptance/bundle/telemetry/deploy/output.txt b/acceptance/bundle/telemetry/deploy/output.txt index b20bfb1b39..cf5dd1e434 100644 --- a/acceptance/bundle/telemetry/deploy/output.txt +++ b/acceptance/bundle/telemetry/deploy/output.txt @@ -10,6 +10,3 @@ Deployment complete! 
=== Assert that mutator execution times are being recorded >>> cat telemetry.json true -No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] - -Exit code: 1 diff --git a/acceptance/bundle/telemetry/test.toml b/acceptance/bundle/telemetry/test.toml index d47cfd33e3..14453c07f9 100644 --- a/acceptance/bundle/telemetry/test.toml +++ b/acceptance/bundle/telemetry/test.toml @@ -8,6 +8,11 @@ DATABRICKS_CACHE_ENABLED = 'false' Old = '"execution_time_ms": \d{1,5},' New = '"execution_time_ms": SMALL_INT,' +# Same replacement for compact JSON inside protoLogs strings (no space after colon). +[[Repls]] +Old = 'execution_time_ms\\":\d{1,5},' +New = 'execution_time_ms\\":SMALL_INT,' + [[Repls]] Old = '(linux|darwin|windows)' New = '[OS]' diff --git a/acceptance/bundle/templates/pydabs/check-formatting/output.txt b/acceptance/bundle/templates/pydabs/check-formatting/output.txt index 09fedb708e..29db0fde3c 100644 --- a/acceptance/bundle/templates/pydabs/check-formatting/output.txt +++ b/acceptance/bundle/templates/pydabs/check-formatting/output.txt @@ -14,6 +14,8 @@ To get started, refer to the project README.md file and the documentation at htt Checking code formatting with ruff format --line-length 88 >>> ruff format --isolated --line-length 88 --quiet --diff --check my_pydabs -script: line 45: ruff: command not found +Checking code formatting with ruff format --line-length 120 -Exit code: 127 +>>> ruff format --isolated --line-length 120 --quiet --diff --check my_pydabs + +>>> yamlcheck.py diff --git a/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml b/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml index f5c8663446..6ca7916ed1 100644 --- a/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/custom-template/out.databricks.yml @@ -1,2 +1,2 @@ bundle: - uuid: [UUID] + uuid: [BUNDLE-UUID] diff --git 
a/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt b/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt index 8ae8e61447..321ab066c5 100644 --- a/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/custom-template/out.requests.txt @@ -10,7 +10,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "POST", @@ -19,7 +19,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"custom\"}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"custom\"}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/custom-template/output.txt b/acceptance/bundle/templates/telemetry/custom-template/output.txt index a272f811e3..6c81fc77dc 100644 --- a/acceptance/bundle/templates/telemetry/custom-template/output.txt +++ b/acceptance/bundle/templates/telemetry/custom-template/output.txt @@ -1,6 +1,24 @@ >>> [CLI] bundle init . 
--config-file input.json --output-dir output ✨ Successfully initialized template -No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] -Exit code: 1 +>>> cat out.requests.txt +{ + "frontend_log_event_id": "[UUID]", + "entry": { + "databricks_cli_log": { + "execution_context": { + "cmd_exec_id": "[CMD-EXEC-ID]", + "version": "[DEV_VERSION]", + "command": "bundle_init", + "operating_system": "[OS]", + "execution_time_ms": SMALL_INT, + "exit_code": 0 + }, + "bundle_init_event": { + "bundle_uuid": "[BUNDLE-UUID]", + "template_name": "custom" + } + } + } +} diff --git a/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml b/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml index 5f2a8cd93b..d4e7a980b0 100644 --- a/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/dbt-sql/out.databricks.yml @@ -3,7 +3,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_dbt_sql - uuid: [UUID] + uuid: [BUNDLE-UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt b/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt index a89e6dae56..34ccec2899 100644 --- a/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/dbt-sql/out.requests.txt @@ -10,7 +10,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "GET", @@ -19,7 +19,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "GET", @@ -28,7 +28,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "POST", @@ -37,7 +37,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"dbt-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"yes\"}]}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"dbt-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"yes\"}]}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/dbt-sql/output.txt b/acceptance/bundle/templates/telemetry/dbt-sql/output.txt index a9fe878d73..092d7557f2 100644 --- a/acceptance/bundle/templates/telemetry/dbt-sql/output.txt +++ b/acceptance/bundle/templates/telemetry/dbt-sql/output.txt @@ -8,6 +8,34 @@ workspace_host: [DATABRICKS_URL] If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started. Refer to the README.md file for full "getting started" guide and production setup instructions. 
-No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] -Exit code: 1 +>>> cat out.requests.txt +{ + "frontend_log_event_id": "[UUID]", + "entry": { + "databricks_cli_log": { + "execution_context": { + "cmd_exec_id": "[CMD-EXEC-ID]", + "version": "[DEV_VERSION]", + "command": "bundle_init", + "operating_system": "[OS]", + "execution_time_ms": SMALL_INT, + "exit_code": 0 + }, + "bundle_init_event": { + "bundle_uuid": "[BUNDLE-UUID]", + "template_name": "dbt-sql", + "template_enum_args": [ + { + "key": "personal_schemas", + "value": "yes" + }, + { + "key": "serverless", + "value": "yes" + } + ] + } + } + } +} diff --git a/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml b/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml index fe79065310..2e00b00b9e 100644 --- a/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/default-python/out.databricks.yml @@ -2,7 +2,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_default_python - uuid: [UUID] + uuid: [BUNDLE-UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/telemetry/default-python/out.requests.txt b/acceptance/bundle/templates/telemetry/default-python/out.requests.txt index 9a32f8d21f..932cb465bc 100644 --- a/acceptance/bundle/templates/telemetry/default-python/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/default-python/out.requests.txt @@ -10,7 +10,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "GET", @@ -19,7 +19,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "GET", @@ -28,7 +28,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "POST", @@ -37,7 +37,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"default-python\",\"template_enum_args\":[{\"key\":\"include_job\",\"value\":\"yes\"},{\"key\":\"include_pipeline\",\"value\":\"yes\"},{\"key\":\"include_python\",\"value\":\"yes\"},{\"key\":\"language\",\"value\":\"python\"},{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"no\"}]}}}}" + "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"default-python\",\"template_enum_args\":[{\"key\":\"include_job\",\"value\":\"yes\"},{\"key\":\"include_pipeline\",\"value\":\"yes\"},{\"key\":\"include_python\",\"value\":\"yes\"},{\"key\":\"language\",\"value\":\"python\"},{\"key\":\"personal_schemas\",\"value\":\"yes\"},{\"key\":\"serverless\",\"value\":\"no\"}]}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/default-python/output.txt b/acceptance/bundle/templates/telemetry/default-python/output.txt index bafdebbc76..c1f9e4f082 100644 --- a/acceptance/bundle/templates/telemetry/default-python/output.txt +++ b/acceptance/bundle/templates/telemetry/default-python/output.txt @@ -9,6 +9,50 @@ Note that [DATABRICKS_URL] is used for initialization. ✨ Your new project has been created in the 'my_default_python' directory! To get started, refer to the project README.md file and the documentation at https://docs.databricks.com/dev-tools/bundles/index.html. 
-No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] -Exit code: 1 +>>> cat out.requests.txt +{ + "frontend_log_event_id": "[UUID]", + "entry": { + "databricks_cli_log": { + "execution_context": { + "cmd_exec_id": "[CMD-EXEC-ID]", + "version": "[DEV_VERSION]", + "command": "bundle_init", + "operating_system": "[OS]", + "execution_time_ms": SMALL_INT, + "exit_code": 0 + }, + "bundle_init_event": { + "bundle_uuid": "[BUNDLE-UUID]", + "template_name": "default-python", + "template_enum_args": [ + { + "key": "include_job", + "value": "yes" + }, + { + "key": "include_pipeline", + "value": "yes" + }, + { + "key": "include_python", + "value": "yes" + }, + { + "key": "language", + "value": "python" + }, + { + "key": "personal_schemas", + "value": "yes" + }, + { + "key": "serverless", + "value": "no" + } + ] + } + } + } +} diff --git a/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml b/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml index 07562ce7ad..49704391ea 100644 --- a/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml +++ b/acceptance/bundle/templates/telemetry/default-sql/out.databricks.yml @@ -2,7 +2,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_default_sql - uuid: [UUID] + uuid: [BUNDLE-UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt b/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt index ee8e310195..459bbc63e6 100644 --- a/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt +++ b/acceptance/bundle/templates/telemetry/default-sql/out.requests.txt @@ -10,7 +10,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "GET", @@ -19,7 +19,7 @@ { "headers": { "User-Agent": [ - "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[UUID] interactive/none auth/pat" + "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/bundle_init cmd-exec-id/[CMD-EXEC-ID] interactive/none auth/pat" ] }, "method": "POST", @@ -28,7 +28,7 @@ "uploadTime": [UNIX_TIME_MILLIS], "items": [], "protoLogs": [ - "{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[UUID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[UUID]\",\"template_name\":\"default-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"}]}}}}" + 
"{\"frontend_log_event_id\":\"[UUID]\",\"entry\":{\"databricks_cli_log\":{\"execution_context\":{\"cmd_exec_id\":\"[CMD-EXEC-ID]\",\"version\":\"[DEV_VERSION]\",\"command\":\"bundle_init\",\"operating_system\":\"[OS]\",\"execution_time_ms\":\"SMALL_INT\",\"exit_code\":0},\"bundle_init_event\":{\"bundle_uuid\":\"[BUNDLE-UUID]\",\"template_name\":\"default-sql\",\"template_enum_args\":[{\"key\":\"personal_schemas\",\"value\":\"yes\"}]}}}}" ] } } diff --git a/acceptance/bundle/templates/telemetry/default-sql/output.txt b/acceptance/bundle/templates/telemetry/default-sql/output.txt index b55e38d4b7..907e24fc87 100644 --- a/acceptance/bundle/templates/telemetry/default-sql/output.txt +++ b/acceptance/bundle/templates/telemetry/default-sql/output.txt @@ -8,6 +8,30 @@ workspace_host: [DATABRICKS_URL] Please refer to the README.md file for "getting started" instructions. See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html. -No command execution ID found in User-Agent: cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] -Exit code: 1 +>>> cat out.requests.txt +{ + "frontend_log_event_id": "[UUID]", + "entry": { + "databricks_cli_log": { + "execution_context": { + "cmd_exec_id": "[CMD-EXEC-ID]", + "version": "[DEV_VERSION]", + "command": "bundle_init", + "operating_system": "[OS]", + "execution_time_ms": SMALL_INT, + "exit_code": 0 + }, + "bundle_init_event": { + "bundle_uuid": "[BUNDLE-UUID]", + "template_name": "default-sql", + "template_enum_args": [ + { + "key": "personal_schemas", + "value": "yes" + } + ] + } + } + } +} From bde633bf1d0837b6a96c9172f4c55b2499c8341e Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 16:48:02 +0100 Subject: [PATCH 13/17] Fix flaky auth/switch/nominal test by mocking /.well-known/databricks-config The SDK bump introduced host metadata resolution during EnsureResolved(), which fetches /.well-known/databricks-config from each profile's host. 
The test used fake DNS hostnames that caused non-deterministic warning ordering. Fix by adding a default handler for the endpoint and pointing test profiles at the mock server. Co-authored-by: Isaac --- .../cmd/auth/switch/nominal/out.databrickscfg | 4 ++-- acceptance/cmd/auth/switch/nominal/output.txt | 16 ++++++---------- acceptance/cmd/auth/switch/nominal/script | 6 +++--- libs/testserver/handlers.go | 6 ++++++ 4 files changed, 17 insertions(+), 15 deletions(-) diff --git a/acceptance/cmd/auth/switch/nominal/out.databrickscfg b/acceptance/cmd/auth/switch/nominal/out.databrickscfg index 81d6359f16..6b07c2a321 100644 --- a/acceptance/cmd/auth/switch/nominal/out.databrickscfg +++ b/acceptance/cmd/auth/switch/nominal/out.databrickscfg @@ -2,12 +2,12 @@ [DEFAULT] [profile-a] -host = https://profile-a.cloud.databricks.com +host = [DATABRICKS_URL] token = token-a auth_type = pat [profile-b] -host = https://profile-b.cloud.databricks.com +host = [DATABRICKS_URL] token = token-b auth_type = pat diff --git a/acceptance/cmd/auth/switch/nominal/output.txt b/acceptance/cmd/auth/switch/nominal/output.txt index 7a39d54ad3..00f83abfaa 100644 --- a/acceptance/cmd/auth/switch/nominal/output.txt +++ b/acceptance/cmd/auth/switch/nominal/output.txt @@ -11,11 +11,9 @@ default_profile = profile-a === Profiles after first switch >>> [CLI] auth profiles --skip-validate -Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-a.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-a.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-a.cloud.databricks.com: no such host. Falling back to user config. -Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-b.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-b.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-b.cloud.databricks.com: no such host. Falling back to user config. 
-Name Host Valid -profile-a (Default) https://profile-a.cloud.databricks.com NO -profile-b https://profile-b.cloud.databricks.com NO +Name Host Valid +profile-a (Default) [DATABRICKS_URL] NO +profile-b [DATABRICKS_URL] NO === Switch to profile-b @@ -29,8 +27,6 @@ default_profile = profile-b === Profiles after second switch >>> [CLI] auth profiles --skip-validate -Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-b.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-b.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-b.cloud.databricks.com: no such host. Falling back to user config. -Warn: Failed to resolve host metadata: fetching host metadata from "https://profile-a.cloud.databricks.com/.well-known/databricks-config": Get "https://profile-a.cloud.databricks.com/.well-known/databricks-config": dial tcp: lookup profile-a.cloud.databricks.com: no such host. Falling back to user config. -Name Host Valid -profile-a https://profile-a.cloud.databricks.com NO -profile-b (Default) https://profile-b.cloud.databricks.com NO +Name Host Valid +profile-a [DATABRICKS_URL] NO +profile-b (Default) [DATABRICKS_URL] NO diff --git a/acceptance/cmd/auth/switch/nominal/script b/acceptance/cmd/auth/switch/nominal/script index ea224abd93..625ee0af8f 100644 --- a/acceptance/cmd/auth/switch/nominal/script +++ b/acceptance/cmd/auth/switch/nominal/script @@ -1,17 +1,17 @@ sethome "./home" # Create two profiles without a [__settings__] section. 
-cat > "./home/.databrickscfg" <<'EOF' +cat > "./home/.databrickscfg" < Date: Fri, 20 Mar 2026 19:53:18 +0100 Subject: [PATCH 14/17] Update record_cloud expected outputs to include .well-known/databricks-config requests --- .../record_cloud/basic/out.requests.txt | 4 ++ .../record_cloud/error/out.requests.txt | 4 ++ .../record_cloud/pipeline-crud/output.txt | 24 ++++++++++ .../record_cloud/workspace-file-io/output.txt | 44 +++++++++++++++++++ 4 files changed, 76 insertions(+) diff --git a/acceptance/selftest/record_cloud/basic/out.requests.txt b/acceptance/selftest/record_cloud/basic/out.requests.txt index ec41b6c5f6..12576f503d 100644 --- a/acceptance/selftest/record_cloud/basic/out.requests.txt +++ b/acceptance/selftest/record_cloud/basic/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" diff --git a/acceptance/selftest/record_cloud/error/out.requests.txt b/acceptance/selftest/record_cloud/error/out.requests.txt index 8c02528505..4b72027600 100644 --- a/acceptance/selftest/record_cloud/error/out.requests.txt +++ b/acceptance/selftest/record_cloud/error/out.requests.txt @@ -1,3 +1,7 @@ +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.2/jobs/get", diff --git a/acceptance/selftest/record_cloud/pipeline-crud/output.txt b/acceptance/selftest/record_cloud/pipeline-crud/output.txt index 2aef802cd9..c62fb4a463 100644 --- a/acceptance/selftest/record_cloud/pipeline-crud/output.txt +++ b/acceptance/selftest/record_cloud/pipeline-crud/output.txt @@ -2,6 +2,10 @@ === Create a pipeline >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/pipelines", @@ -23,6 +27,10 @@ "test-pipeline-1" >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/pipelines/[UUID]" @@ -32,6 +40,10 @@ 
>>> [CLI] pipelines update [UUID] --json @pipeline2.json >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "PUT", "path": "/api/2.0/pipelines/[UUID]", @@ -53,6 +65,10 @@ "test-pipeline-2" >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/pipelines/[UUID]" @@ -62,6 +78,10 @@ >>> [CLI] pipelines delete [UUID] >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "DELETE", "path": "/api/2.0/pipelines/[UUID]" @@ -74,6 +94,10 @@ Error: The specified pipeline [UUID] was not found. Exit code: 1 >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/pipelines/[UUID]" diff --git a/acceptance/selftest/record_cloud/workspace-file-io/output.txt b/acceptance/selftest/record_cloud/workspace-file-io/output.txt index 48d9b41810..d08a332e6a 100644 --- a/acceptance/selftest/record_cloud/workspace-file-io/output.txt +++ b/acceptance/selftest/record_cloud/workspace-file-io/output.txt @@ -3,10 +3,18 @@ >>> [CLI] workspace mkdirs /Users/[USERNAME]/[UNIQUE_NAME] >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/preview/scim/v2/Me" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/workspace/mkdirs", @@ -19,6 +27,10 @@ >>> [CLI] workspace import /Users/[USERNAME]/[UNIQUE_NAME]/hello.txt --format AUTO --file ./hello.txt >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/workspace/import", @@ -37,6 +49,10 @@ } >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/list", @@ -53,6 +69,10 @@ } >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { 
"method": "GET", "path": "/api/2.0/workspace/get-status", @@ -68,6 +88,10 @@ hello, world >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/export", @@ -81,6 +105,10 @@ hello, world >>> [CLI] workspace delete /Users/[USERNAME]/[UNIQUE_NAME]/hello.txt >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/workspace/delete", @@ -96,6 +124,10 @@ Error: Path (/Users/[USERNAME]/[UNIQUE_NAME]/hello.txt) doesn't exist. Exit code: 1 >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/get-status", @@ -112,6 +144,10 @@ ID Type Language Path >>> [CLI] workspace delete /Users/[USERNAME]/[UNIQUE_NAME] >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/list", @@ -119,6 +155,10 @@ ID Type Language Path "path": "/Users/[USERNAME]/[UNIQUE_NAME]" } } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.0/workspace/delete", @@ -134,6 +174,10 @@ Error: Path (/Users/[USERNAME]/[UNIQUE_NAME]) doesn't exist. 
Exit code: 1 >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/workspace/list", From 711dee770fae57dc0a3e08887112a626f672ce7b Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 20:36:52 +0100 Subject: [PATCH 15/17] Address denik's review feedback on PR #4799 - Exclude tagging.yml dependencies from dependabot (externally managed) - Redact .well-known host metadata warnings in auth/bundle_and_profile test - Consolidate .well-known handler: use consistent workspace_id (470123456789500), remove duplicate handler from handlers.go Co-authored-by: Isaac --- .github/dependabot.yml | 4 ++++ acceptance/auth/bundle_and_profile/output.txt | 4 ++-- acceptance/auth/bundle_and_profile/test.toml | 4 ++++ libs/testserver/handlers.go | 6 ------ libs/testserver/server.go | 4 ++-- 5 files changed, 12 insertions(+), 10 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index c802a7050d..378d63e75a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -15,3 +15,7 @@ updates: - "/.github/actions/setup-build-environment" schedule: interval: "monthly" + # tagging.yml is generated and maintained externally. Ignore + # actions/create-github-app-token since it is only used in tagging.yml. + ignore: + - dependency-name: "actions/create-github-app-token" diff --git a/acceptance/auth/bundle_and_profile/output.txt b/acceptance/auth/bundle_and_profile/output.txt index c1821d061b..88deef1256 100644 --- a/acceptance/auth/bundle_and_profile/output.txt +++ b/acceptance/auth/bundle_and_profile/output.txt @@ -13,7 +13,7 @@ === Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle. 
>>> errcode [CLI] current-user me -p profile_name -Warn: Failed to resolve host metadata: fetching host metadata from "https://non.existing.subdomain.databricks.com/.well-known/databricks-config": Get "https://non.existing.subdomain.databricks.com/.well-known/databricks-config": tls: failed to verify certificate: x509: certificate is valid for *.cloud.databricks.com, *.databricks.com, cloud.databricks.com, marketplace.databricks.com, login.databricks.com, signup.databricks.com, signin.databricks.com, not non.existing.subdomain.databricks.com. Falling back to user config. +Warn: Failed to resolve host metadata: (redacted). Falling back to user config. Error: Get "https://non.existing.subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted) Exit code: 1 @@ -73,7 +73,7 @@ Validation OK! === Bundle commands load bundle configuration with -t and -p flag, validation not OK (profile host don't match bundle host) >>> errcode [CLI] bundle validate -t prod -p DEFAULT -Warn: Failed to resolve host metadata: fetching host metadata from "https://bar.com/.well-known/databricks-config": received HTML response instead of JSON. Falling back to user config. +Warn: Failed to resolve host metadata: (redacted). Falling back to user config. Error: cannot resolve bundle auth configuration: the host in the profile ([DATABRICKS_TARGET]) doesn’t match the host configured in the bundle (https://bar.com) Name: test-auth diff --git a/acceptance/auth/bundle_and_profile/test.toml b/acceptance/auth/bundle_and_profile/test.toml index 92458e9d30..477e83a18d 100644 --- a/acceptance/auth/bundle_and_profile/test.toml +++ b/acceptance/auth/bundle_and_profile/test.toml @@ -9,6 +9,10 @@ New='DATABRICKS_TARGET' Old='DATABRICKS_URL' New='DATABRICKS_TARGET' +[[Repls]] +Old='Warn: Failed to resolve host metadata: .*\. Falling back to user config\.' +New='Warn: Failed to resolve host metadata: (redacted). Falling back to user config.' 
+ [[Repls]] Old='Get "https://non.existing.subdomain.databricks.com/api/2.0/preview/scim/v2/Me": .*' New='Get "https://non.existing.subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)' diff --git a/libs/testserver/handlers.go b/libs/testserver/handlers.go index 917a2c8cbf..9e30cb5f0c 100644 --- a/libs/testserver/handlers.go +++ b/libs/testserver/handlers.go @@ -265,12 +265,6 @@ func AddDefaultHandlers(server *Server) { return MapList(req.Workspace, req.Workspace.JobRuns, "runs") }) - server.Handle("GET", "/.well-known/databricks-config", func(_ Request) any { - return map[string]any{ - "oidc_endpoint": server.URL + "/oidc", - } - }) - server.Handle("GET", "/oidc/.well-known/oauth-authorization-server", func(_ Request) any { return server.fakeOidc.OidcEndpoints() }) diff --git a/libs/testserver/server.go b/libs/testserver/server.go index a2d28a3738..2d7048dc8d 100644 --- a/libs/testserver/server.go +++ b/libs/testserver/server.go @@ -265,9 +265,9 @@ Response.Body = '' // handler, any test that creates an SDK client against this server would fail // with "No handler for URL: /.well-known/databricks-config". 
s.Handle("GET", "/.well-known/databricks-config", func(_ Request) any { - return map[string]string{ + return map[string]any{ "oidc_endpoint": server.URL + "/oidc", - "workspace_id": "12345678", + "workspace_id": "470123456789500", } }) From 809b73ece3e36400791617d66e75edfa091c53e5 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 21:18:53 +0100 Subject: [PATCH 16/17] Update record_cloud volume-io expected outputs to include .well-known/databricks-config requests Co-authored-by: Isaac --- .../record_cloud/volume-io/output.txt | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/acceptance/selftest/record_cloud/volume-io/output.txt b/acceptance/selftest/record_cloud/volume-io/output.txt index c059a8ff20..8b8df2099e 100644 --- a/acceptance/selftest/record_cloud/volume-io/output.txt +++ b/acceptance/selftest/record_cloud/volume-io/output.txt @@ -6,6 +6,10 @@ } >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.1/unity-catalog/schemas", @@ -22,6 +26,10 @@ } >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.1/unity-catalog/schemas/main.schema-[UNIQUE_NAME]" @@ -34,6 +42,10 @@ } >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "POST", "path": "/api/2.1/unity-catalog/volumes", @@ -52,6 +64,10 @@ } >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.1/unity-catalog/volumes/main.schema-[UNIQUE_NAME].volume-[UNIQUE_NAME]" @@ -61,14 +77,26 @@ ./hello.txt -> dbfs:/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "HEAD", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { 
"method": "HEAD", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "PUT", "path": "/api/2.0/fs/files/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt", @@ -82,6 +110,10 @@ hello.txt >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]" @@ -90,6 +122,10 @@ hello.txt >>> [CLI] fs cat dbfs:/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt hello, world >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/fs/files/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt" @@ -98,14 +134,26 @@ hello, world >>> [CLI] fs rm dbfs:/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "HEAD", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "HEAD", "path": "/api/2.0/fs/files/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt" } +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "DELETE", "path": "/api/2.0/fs/files/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt" @@ -114,6 +162,10 @@ hello, world >>> [CLI] fs ls dbfs:/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME] >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]" @@ -122,6 +174,10 @@ hello, world >>> [CLI] volumes delete main.schema-[UNIQUE_NAME].volume-[UNIQUE_NAME] >>> print_requests +{ + "method": "GET", + 
"path": "/.well-known/databricks-config" +} { "method": "DELETE", "path": "/api/2.1/unity-catalog/volumes/main.schema-[UNIQUE_NAME].volume-[UNIQUE_NAME]" @@ -133,6 +189,10 @@ Error: Volume 'main.schema-[UNIQUE_NAME].volume-[UNIQUE_NAME]' does not exist. Exit code: 1 >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.1/unity-catalog/volumes/main.schema-[UNIQUE_NAME].volume-[UNIQUE_NAME]" @@ -141,6 +201,10 @@ Exit code: 1 >>> [CLI] schemas delete main.schema-[UNIQUE_NAME] >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "DELETE", "path": "/api/2.1/unity-catalog/schemas/main.schema-[UNIQUE_NAME]" @@ -152,6 +216,10 @@ Error: Schema 'main.schema-[UNIQUE_NAME]' does not exist. Exit code: 1 >>> print_requests +{ + "method": "GET", + "path": "/.well-known/databricks-config" +} { "method": "GET", "path": "/api/2.1/unity-catalog/schemas/main.schema-[UNIQUE_NAME]" From 0a5aad88dae79b5f24765b271125e8af2595748a Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 Mar 2026 22:36:29 +0100 Subject: [PATCH 17/17] Remove 4 incorrect .well-known/databricks-config entries from volume-io expected output The SDK caches the .well-known response per client, so consecutive HTTP requests within a single CLI command (fs cp, fs rm) do not each produce a separate .well-known lookup. Removed the extra entries before the second HEAD, PUT, and DELETE requests in the upload and delete sections. 
Co-authored-by: Isaac --- .../selftest/record_cloud/volume-io/output.txt | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/acceptance/selftest/record_cloud/volume-io/output.txt b/acceptance/selftest/record_cloud/volume-io/output.txt index 8b8df2099e..bcce9a7f89 100644 --- a/acceptance/selftest/record_cloud/volume-io/output.txt +++ b/acceptance/selftest/record_cloud/volume-io/output.txt @@ -85,18 +85,10 @@ "method": "HEAD", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]" } -{ - "method": "GET", - "path": "/.well-known/databricks-config" -} { "method": "HEAD", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]" } -{ - "method": "GET", - "path": "/.well-known/databricks-config" -} { "method": "PUT", "path": "/api/2.0/fs/files/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt", @@ -142,18 +134,10 @@ hello, world "method": "HEAD", "path": "/api/2.0/fs/directories/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt" } -{ - "method": "GET", - "path": "/.well-known/databricks-config" -} { "method": "HEAD", "path": "/api/2.0/fs/files/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt" } -{ - "method": "GET", - "path": "/.well-known/databricks-config" -} { "method": "DELETE", "path": "/api/2.0/fs/files/Volumes/main/schema-[UNIQUE_NAME]/volume-[UNIQUE_NAME]/hello.txt"