Skip to content

Commit 35c156e

Browse files
committed
DLT init reusing lakeflow template
1 parent 523c76d commit 35c156e

File tree

6 files changed

+115
-27
lines changed

6 files changed

+115
-27
lines changed

cmd/dlt/dlt.go

Lines changed: 21 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,12 @@
11
package dlt
22

33
import (
4-
"context"
4+
"log/slog"
55

66
"github.com/databricks/cli/libs/cmdio"
7+
"github.com/databricks/cli/libs/flags"
8+
"github.com/databricks/cli/libs/log"
9+
"github.com/databricks/cli/libs/log/handler"
710
"github.com/spf13/cobra"
811
)
912

@@ -12,21 +15,28 @@ func New() *cobra.Command {
1215
Use: "dlt",
1316
Short: "DLT CLI",
1417
Long: "DLT CLI (stub, to be filled in)",
18+
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
19+
// Initialize cmdio context
20+
cmdIO := cmdio.NewIO(cmd.Context(), flags.OutputText, cmd.InOrStdin(), cmd.OutOrStdout(), cmd.ErrOrStderr(), "", "")
21+
ctx := cmdio.InContext(cmd.Context(), cmdIO)
22+
23+
// Set up logger with WARN level
24+
h := handler.NewFriendlyHandler(cmd.ErrOrStderr(), &handler.Options{
25+
Color: cmdio.IsTTY(cmd.ErrOrStderr()),
26+
Level: log.LevelWarn,
27+
})
28+
logger := slog.New(h)
29+
ctx = log.NewContext(ctx, logger)
30+
31+
cmd.SetContext(ctx)
32+
return nil
33+
},
1534
Run: func(cmd *cobra.Command, args []string) {
1635
_ = cmd.Help()
1736
},
1837
}
1938

20-
// Add 'init' stub command (same description as bundle init)
21-
initCmd := &cobra.Command{
22-
Use: "init",
23-
Short: "Initialize a new DLT project in the current directory",
24-
Long: "Initialize a new DLT project in the current directory. This is a stub for future implementation.",
25-
Run: func(cmd *cobra.Command, args []string) {
26-
cmdio.LogString(context.Background(), "dlt init is not yet implemented. This will initialize a new DLT project in the future.")
27-
},
28-
}
29-
cmd.AddCommand(initCmd)
39+
cmd.AddCommand(initCommand())
3040

3141
return cmd
3242
}

cmd/dlt/init.go

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
package dlt
2+
3+
import (
4+
"github.com/databricks/cli/cmd/root"
5+
"github.com/databricks/cli/libs/template"
6+
"github.com/spf13/cobra"
7+
)
8+
9+
func initCommand() *cobra.Command {
10+
var outputDir string
11+
var configFile string
12+
cmd := &cobra.Command{
13+
Use: "init",
14+
Short: "Initialize a new DLT project",
15+
PreRunE: root.MustWorkspaceClient,
16+
RunE: func(cmd *cobra.Command, args []string) error {
17+
ctx := cmd.Context()
18+
19+
r := template.Resolver{
20+
TemplatePathOrUrl: "lakeflow-pipelines",
21+
ConfigFile: configFile,
22+
OutputDir: outputDir,
23+
}
24+
25+
tmpl, err := r.Resolve(ctx)
26+
if err != nil {
27+
return err
28+
}
29+
defer tmpl.Reader.Cleanup(ctx)
30+
31+
err = tmpl.Writer.PromptForInput(ctx, tmpl.Reader)
32+
if err != nil {
33+
return err
34+
}
35+
tmpl.Writer.SetConfig("is_dlt", true)
36+
err = tmpl.Writer.Finalize(ctx)
37+
if err != nil {
38+
return err
39+
}
40+
return nil
41+
},
42+
}
43+
cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to")
44+
cmd.Flags().StringVar(&configFile, "config-file", "", "JSON file containing key value pairs of input parameters required for template initialization")
45+
return cmd
46+
}

libs/template/templates/lakeflow-pipelines/databricks_template_schema.json

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,12 @@
99
"pattern": "^[a-z0-9_]+$",
1010
"pattern_match_failure_message": "Name must consist of lower case letters, numbers, and underscores."
1111
},
12+
"is_dlt": {
13+
"skip_prompt_if": {},
14+
"type": "boolean",
15+
"description": "DLT pipelines using this template",
16+
"default": false
17+
},
1218
"default_catalog": {
1319
"type": "string",
1420
"default": "{{default_catalog}}",

libs/template/templates/lakeflow-pipelines/template/__preamble.tmpl

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ This file only contains template directives; it is skipped for the actual output
55
{{skip "__preamble"}}
66

77
{{$isSQL := eq .language "sql"}}
8+
{{$isDLT := eq .is_dlt true}}
89

910
{{if $isSQL}}
1011
{{skip "{{.project_name}}/resources/{{.project_name}}_pipeline/utilities/utils.py"}}
@@ -14,3 +15,7 @@ This file only contains template directives; it is skipped for the actual output
1415
{{skip "{{.project_name}}/resources/{{.project_name}}_pipeline/transformations/sample_zones_{{.project_name}}.sql"}}
1516
{{skip "{{.project_name}}/resources/{{.project_name}}_pipeline/transformations/sample_trips_{{.project_name}}.sql"}}
1617
{{end}}
18+
19+
{{if $isDLT}}
20+
{{skip "{{.project_name}}/resources/{{.project_name}}_pipeline/{{.project_name}}.job.yml"}}
21+
{{end}}

libs/template/templates/lakeflow-pipelines/template/{{.project_name}}/databricks.yml.tmpl

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
# This is a Databricks asset bundle definition for {{.project_name}}.
22
# See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
3-
bundle:
3+
{{$isDLT := eq .is_dlt true}}{{if $isDLT}}project{{else}}bundle{{end}}:
44
name: {{.project_name}}
5-
uuid: {{bundle_uuid}}
5+
{{if not $isDLT}}uuid: {{bundle_uuid}}{{end}}
66

77
include:
8-
- resources/*.yml
9-
- resources/*/*.yml
8+
{{if $isDLT}}- ./*.yml{{else}}- resources/*.yml
9+
- resources/*/*.yml{{end}}
1010

1111
# Variable declarations. These variables are assigned in the dev/prod targets below.
1212
variables:
@@ -25,6 +25,7 @@ targets:
2525
# See also https://docs.databricks.com/dev-tools/bundles/deployment-modes.html.
2626
mode: development
2727
default: true
28+
{{if $isDLT}}deploy_on_run: true{{end}}
2829
workspace:
2930
host: {{workspace_host}}
3031
variables:
@@ -36,11 +37,11 @@ targets:
3637
mode: production
3738
workspace:
3839
host: {{workspace_host}}
39-
# We explicitly deploy to /Workspace/Users/{{user_name}} to make sure we only have a single copy.
40-
root_path: /Workspace/Users/{{user_name}}/.bundle/${bundle.name}/${bundle.target}
41-
permissions:
40+
{{if not $isDLT}}# We explicitly deploy to /Workspace/Users/{{user_name}} to make sure we only have a single copy.
41+
root_path: /Workspace/Users/{{user_name}}/.bundle/${bundle.name}/${bundle.target}{{end}}
42+
{{if $isDLT}}owner: user@company.com{{else}}permissions:
4243
- {{if is_service_principal}}service_principal{{else}}user{{end}}_name: {{user_name}}
43-
level: CAN_MANAGE
44+
level: CAN_MANAGE{{end}}
4445
variables:
4546
catalog: {{.default_catalog}}
4647
schema: {{template `prod_schema` .}}

libs/template/writer.go

Lines changed: 28 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -32,11 +32,20 @@ type Writer interface {
3232
// 2. The output directory where the template will be materialized.
3333
Configure(ctx context.Context, configPath, outputDir string) error
3434

35+
// Finalize walks the template file tree, persists files to disk, and prints the success message.
36+
Finalize(ctx context.Context) error
37+
3538
// Materialize the template to the local file system.
3639
Materialize(ctx context.Context, r Reader) error
3740

3841
// Log telemetry for the template initialization event.
3942
LogTelemetry(ctx context.Context)
43+
44+
// SetConfig sets a value in the template's config.
45+
SetConfig(key string, value any)
46+
47+
// PromptForInput prompts the user for any missing config values.
48+
PromptForInput(ctx context.Context, reader Reader) error
4049
}
4150

4251
type defaultWriter struct {
@@ -49,6 +58,13 @@ type defaultWriter struct {
4958
renderer *renderer
5059
}
5160

61+
// SetConfig sets a value in the template's config.
62+
func (tmpl *defaultWriter) SetConfig(key string, value any) {
63+
if tmpl.renderer != nil {
64+
tmpl.renderer.config[key] = value
65+
}
66+
}
67+
5268
func constructOutputFiler(ctx context.Context, outputDir string) (filer.Filer, error) {
5369
outputDir, err := filepath.Abs(outputDir)
5470
if err != nil {
@@ -81,7 +97,7 @@ func (tmpl *defaultWriter) Configure(ctx context.Context, configPath, outputDir
8197
return nil
8298
}
8399

84-
func (tmpl *defaultWriter) promptForInput(ctx context.Context, reader Reader) error {
100+
func (tmpl *defaultWriter) PromptForInput(ctx context.Context, reader Reader) error {
85101
readerFs, err := reader.FS(ctx)
86102
if err != nil {
87103
return err
@@ -143,15 +159,10 @@ func (tmpl *defaultWriter) printSuccessMessage(ctx context.Context) error {
143159
return nil
144160
}
145161

146-
func (tmpl *defaultWriter) Materialize(ctx context.Context, reader Reader) error {
147-
err := tmpl.promptForInput(ctx, reader)
148-
if err != nil {
149-
return err
150-
}
151-
162+
func (tmpl *defaultWriter) Finalize(ctx context.Context) error {
152163
// Walk the template file tree and compute in-memory representations of the
153164
// output files.
154-
err = tmpl.renderer.walk()
165+
err := tmpl.renderer.walk()
155166
if err != nil {
156167
return err
157168
}
@@ -165,6 +176,15 @@ func (tmpl *defaultWriter) Materialize(ctx context.Context, reader Reader) error
165176
return tmpl.printSuccessMessage(ctx)
166177
}
167178

179+
func (tmpl *defaultWriter) Materialize(ctx context.Context, reader Reader) error {
180+
err := tmpl.PromptForInput(ctx, reader)
181+
if err != nil {
182+
return err
183+
}
184+
185+
return tmpl.Finalize(ctx)
186+
}
187+
168188
func (tmpl *defaultWriter) LogTelemetry(ctx context.Context) {
169189
telemetry.Log(ctx, protos.DatabricksCliLog{
170190
BundleInitEvent: &protos.BundleInitEvent{

0 commit comments

Comments
 (0)