Skip to content

Commit 53bf05c

Browse files
committed
[Python] Support schemas resource type
1 parent b2c1182 commit 53bf05c

File tree

17 files changed

+374
-2
lines changed

17 files changed

+374
-2
lines changed
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
bundle:
  name: my_project

sync: {paths: []} # don't need to copy files

experimental:
  python:
    # Entry points resolved as "module:function".
    resources:
      - "resources:load_resources"
    mutators:
      - "mutators:update_schema"

resources:
  schemas:
    # Schema defined statically in YAML; a second one ("my_schema_2") is
    # added programmatically by resources:load_resources.
    my_schema_1:
      name: "My Schema"
      catalog_name: "my_catalog"
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
from dataclasses import replace
2+
3+
from databricks.bundles.core import schema_mutator
4+
from databricks.bundles.schemas import Schema
5+
6+
7+
@schema_mutator
def update_schema(schema: Schema) -> Schema:
    """Return a copy of *schema* with " (updated)" appended to its name."""
    current_name = schema.name

    # Narrow from VariableOr[str]-style values to a plain string before formatting.
    assert isinstance(current_name, str)

    new_name = f"{current_name} (updated)"
    return replace(schema, name=new_name)
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
Local = true
2+
Cloud = false
3+
4+
[EnvMatrix]
5+
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
6+
UV_ARGS = ["--with-requirements requirements-latest.txt --no-cache"]
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
2+
>>> uv run --with-requirements requirements-latest.txt --no-cache -q [CLI] bundle validate --output json
3+
{
4+
"experimental": {
5+
"python": {
6+
"mutators": [
7+
"mutators:update_schema"
8+
],
9+
"resources": [
10+
"resources:load_resources"
11+
]
12+
}
13+
},
14+
"resources": {
15+
"schemas": {
16+
"my_schema_1": {
17+
"catalog_name": "my_catalog",
18+
"name": "My Schema (updated)"
19+
},
20+
"my_schema_2": {
21+
"catalog_name": "my_catalog_2",
22+
"name": "My Schema (2) (updated)"
23+
}
24+
}
25+
}
26+
}
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
from databricks.bundles.core import Resources
2+
3+
4+
def load_resources() -> Resources:
    """Build and return the programmatically-defined bundle resources.

    Registers a single schema, "my_schema_2", alongside whatever is
    declared statically in the bundle configuration.
    """
    result = Resources()

    schema_config = {
        "name": "My Schema (2)",
        "catalog_name": "my_catalog_2",
    }
    result.add_schema("my_schema_2", schema_config)

    return result
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
echo "$DATABRICKS_BUNDLES_WHEEL" > "requirements-latest.txt"
2+
3+
trace uv run $UV_ARGS -q $CLI bundle validate --output json | \
4+
jq "pick(.experimental.python, .resources)"
5+
6+
rm -fr .databricks __pycache__
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
Local = true
2+
Cloud = false # tests don't interact with APIs
3+
4+
[EnvMatrix]
5+
UV_ARGS = [
6+
# schemas are only supported in the latest version of the wheel
7+
"--with-requirements requirements-latest.txt --no-cache",
8+
]

experimental/python/codegen/codegen/packages.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
# Maps resource class paths to the key used for that resource type in bundle
# configuration — presumably the plural group name under "resources:"
# (e.g. "resources.schemas"); verify against the codegen templates.
RESOURCE_NAMESPACE = {
    "resources.Job": "jobs",
    "resources.Pipeline": "pipelines",
    "resources.Schema": "schemas",
    "resources.Volume": "volumes",
}
1011

experimental/python/databricks/bundles/core/__init__.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,11 +13,12 @@
1313
"VariableOrList",
1414
"VariableOrOptional",
1515
"job_mutator",
16+
"pipeline_mutator",
17+
"schema_mutator",
1618
"load_resources_from_current_package_module",
1719
"load_resources_from_module",
1820
"load_resources_from_modules",
1921
"load_resources_from_package_module",
20-
"pipeline_mutator",
2122
"variables",
2223
"volume_mutator",
2324
]
@@ -40,6 +41,7 @@
4041
ResourceMutator,
4142
job_mutator,
4243
pipeline_mutator,
44+
schema_mutator,
4345
volume_mutator,
4446
)
4547
from databricks.bundles.core._resources import Resources

experimental/python/databricks/bundles/core/_resource_mutator.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
if TYPE_CHECKING:
99
from databricks.bundles.jobs._models.job import Job
1010
from databricks.bundles.pipelines._models.pipeline import Pipeline
11+
from databricks.bundles.schemas._models.schema import Schema
1112
from databricks.bundles.volumes._models.volume import Volume
1213

1314
_T = TypeVar("_T", bound=Resource)
@@ -130,6 +131,38 @@ def my_pipeline_mutator(bundle: Bundle, pipeline: Pipeline) -> Pipeline:
130131
return ResourceMutator(resource_type=Pipeline, function=function)
131132

132133

134+
@overload
def schema_mutator(
    function: Callable[[Bundle, "Schema"], "Schema"],
) -> ResourceMutator["Schema"]: ...


@overload
def schema_mutator(
    function: Callable[["Schema"], "Schema"],
) -> ResourceMutator["Schema"]: ...


def schema_mutator(function: Callable) -> ResourceMutator["Schema"]:
    """
    Decorator that registers a function as a schema mutator.

    The decorated function receives a :class:`Schema` (optionally preceded by a
    :class:`Bundle`) and must return a new, modified copy of the schema instead
    of mutating its argument in place.

    Example:

    .. code-block:: python

        @schema_mutator
        def my_schema_mutator(bundle: Bundle, schema: Schema) -> Schema:
            return replace(schema, name="my_schema")

    :param function: Function that mutates a schema.
    """
    # Imported locally, mirroring the TYPE_CHECKING-only import at module
    # level — presumably to avoid an import cycle between core and schemas.
    from databricks.bundles.schemas._models.schema import Schema

    return ResourceMutator(resource_type=Schema, function=function)
164+
165+
133166
@overload
134167
def volume_mutator(
135168
function: Callable[[Bundle, "Volume"], "Volume"],

0 commit comments

Comments
 (0)