diff --git a/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/databricks.yml b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/databricks.yml new file mode 100644 index 0000000000..cdbf76cc8f --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/databricks.yml @@ -0,0 +1,25 @@ +bundle: + name: test-bundle + +# Tests implicit dependency detection for model serving endpoints: +# - ai_gateway.inference_table_config.{catalog_name, schema_name} should resolve +resources: + catalogs: + my_catalog: + name: mycatalog + schemas: + my_schema: + catalog_name: mycatalog + name: myschema + model_serving_endpoints: + my_endpoint: + name: my-endpoint + ai_gateway: + inference_table_config: + catalog_name: mycatalog + schema_name: myschema + enabled: true + +targets: + dev: + mode: development diff --git a/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/out.test.toml b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/output.txt b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/output.txt new file mode 100644 index 0000000000..42be076238 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/output.txt @@ -0,0 +1,27 @@ + +>>> [CLI] bundle validate -t dev -o json +{ + "catalogs": { + "my_catalog": { + "name": "mycatalog" + } + }, + "model_serving_endpoints": { + "my_endpoint": { + "ai_gateway": { + "inference_table_config": { + "catalog_name": "${resources.catalogs.my_catalog.name}", + "enabled": true, + "schema_name": "${resources.schemas.my_schema.name}" + 
} + }, + "name": "dev_[USERNAME]_my-endpoint" + } + }, + "schemas": { + "my_schema": { + "catalog_name": "${resources.catalogs.my_catalog.name}", + "name": "dev_[USERNAME]_myschema" + } + } +} diff --git a/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/script b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/script new file mode 100644 index 0000000000..30cb3ec2e5 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/script @@ -0,0 +1 @@ +trace $CLI bundle validate -t dev -o json | jq .resources diff --git a/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/test.toml b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/test.toml new file mode 100644 index 0000000000..a030353d57 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_model_serving_endpoint/test.toml @@ -0,0 +1 @@ +RecordRequests = false diff --git a/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/databricks.yml b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/databricks.yml new file mode 100644 index 0000000000..8fe5f47176 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/databricks.yml @@ -0,0 +1,21 @@ +bundle: + name: test-bundle + +# Tests implicit dependency detection for quality monitors: +# - output_schema_name (format "catalog.schema") should resolve both parts +resources: + catalogs: + my_catalog: + name: mycatalog + schemas: + my_schema: + catalog_name: mycatalog + name: myschema + quality_monitors: + my_monitor: + table_name: mycatalog.myschema.mytable + output_schema_name: mycatalog.myschema + +targets: + dev: + mode: development diff --git a/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/out.test.toml b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ 
b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/output.txt b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/output.txt new file mode 100644 index 0000000000..c4c40a691f --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/output.txt @@ -0,0 +1,25 @@ + +>>> [CLI] bundle validate -t dev -o json +Warning: required field "assets_dir" is not set + at resources.quality_monitors.my_monitor + in databricks.yml:16:7 + +{ + "catalogs": { + "my_catalog": { + "name": "mycatalog" + } + }, + "quality_monitors": { + "my_monitor": { + "output_schema_name": "${resources.catalogs.my_catalog.name}.${resources.schemas.my_schema.name}", + "table_name": "mycatalog.myschema.mytable" + } + }, + "schemas": { + "my_schema": { + "catalog_name": "${resources.catalogs.my_catalog.name}", + "name": "dev_[USERNAME]_myschema" + } + } +} diff --git a/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/script b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/script new file mode 100644 index 0000000000..30cb3ec2e5 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/script @@ -0,0 +1 @@ +trace $CLI bundle validate -t dev -o json | jq .resources diff --git a/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/test.toml b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/test.toml new file mode 100644 index 0000000000..a030353d57 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_quality_monitor/test.toml @@ -0,0 +1 @@ +RecordRequests = false diff --git a/acceptance/bundle/resource_deps/implicit_deps_registered_model/databricks.yml b/acceptance/bundle/resource_deps/implicit_deps_registered_model/databricks.yml new file mode 100644 index 
0000000000..2224d140b2 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_registered_model/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-bundle + +# Tests implicit dependency detection for registered models: +# - registered model should implicitly depend on schema (schema_name resolved) +# - registered model should implicitly depend on catalog (catalog_name resolved) +resources: + catalogs: + my_catalog: + name: mycatalog + schemas: + my_schema: + catalog_name: mycatalog + name: myschema + registered_models: + my_model: + catalog_name: mycatalog + schema_name: myschema + name: mymodel + +targets: + dev: + mode: development diff --git a/acceptance/bundle/resource_deps/implicit_deps_registered_model/out.test.toml b/acceptance/bundle/resource_deps/implicit_deps_registered_model/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_registered_model/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/bundle/resource_deps/implicit_deps_registered_model/output.txt b/acceptance/bundle/resource_deps/implicit_deps_registered_model/output.txt new file mode 100644 index 0000000000..86d56ae426 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_registered_model/output.txt @@ -0,0 +1,22 @@ + +>>> [CLI] bundle validate -t dev -o json +{ + "catalogs": { + "my_catalog": { + "name": "mycatalog" + } + }, + "registered_models": { + "my_model": { + "catalog_name": "${resources.catalogs.my_catalog.name}", + "name": "dev_[USERNAME]_mymodel", + "schema_name": "${resources.schemas.my_schema.name}" + } + }, + "schemas": { + "my_schema": { + "catalog_name": "${resources.catalogs.my_catalog.name}", + "name": "dev_[USERNAME]_myschema" + } + } +} diff --git a/acceptance/bundle/resource_deps/implicit_deps_registered_model/script 
b/acceptance/bundle/resource_deps/implicit_deps_registered_model/script new file mode 100644 index 0000000000..30cb3ec2e5 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_registered_model/script @@ -0,0 +1 @@ +trace $CLI bundle validate -t dev -o json | jq .resources diff --git a/acceptance/bundle/resource_deps/implicit_deps_registered_model/test.toml b/acceptance/bundle/resource_deps/implicit_deps_registered_model/test.toml new file mode 100644 index 0000000000..a030353d57 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_registered_model/test.toml @@ -0,0 +1 @@ +RecordRequests = false diff --git a/acceptance/bundle/resource_deps/implicit_deps_volume/databricks.yml b/acceptance/bundle/resource_deps/implicit_deps_volume/databricks.yml new file mode 100644 index 0000000000..6589107d7b --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_volume/databricks.yml @@ -0,0 +1,24 @@ +bundle: + name: test-bundle + +# Tests implicit dependency detection for volumes: +# - volume should implicitly depend on schema (schema_name resolved) +# - volume should implicitly depend on catalog (catalog_name resolved) +resources: + catalogs: + my_catalog: + name: mycatalog + schemas: + my_schema: + catalog_name: mycatalog + name: myschema + volumes: + my_volume: + catalog_name: mycatalog + schema_name: myschema + name: myvolume + volume_type: MANAGED + +targets: + dev: + mode: development diff --git a/acceptance/bundle/resource_deps/implicit_deps_volume/out.test.toml b/acceptance/bundle/resource_deps/implicit_deps_volume/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_volume/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/bundle/resource_deps/implicit_deps_volume/output.txt b/acceptance/bundle/resource_deps/implicit_deps_volume/output.txt new file mode 100644 
index 0000000000..2e454e8152 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_volume/output.txt @@ -0,0 +1,23 @@ + +>>> [CLI] bundle validate -t dev -o json +{ + "catalogs": { + "my_catalog": { + "name": "mycatalog" + } + }, + "schemas": { + "my_schema": { + "catalog_name": "${resources.catalogs.my_catalog.name}", + "name": "dev_[USERNAME]_myschema" + } + }, + "volumes": { + "my_volume": { + "catalog_name": "${resources.catalogs.my_catalog.name}", + "name": "myvolume", + "schema_name": "${resources.schemas.my_schema.name}", + "volume_type": "MANAGED" + } + } +} diff --git a/acceptance/bundle/resource_deps/implicit_deps_volume/script b/acceptance/bundle/resource_deps/implicit_deps_volume/script new file mode 100644 index 0000000000..30cb3ec2e5 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_volume/script @@ -0,0 +1 @@ +trace $CLI bundle validate -t dev -o json | jq .resources diff --git a/acceptance/bundle/resource_deps/implicit_deps_volume/test.toml b/acceptance/bundle/resource_deps/implicit_deps_volume/test.toml new file mode 100644 index 0000000000..a030353d57 --- /dev/null +++ b/acceptance/bundle/resource_deps/implicit_deps_volume/test.toml @@ -0,0 +1 @@ +RecordRequests = false diff --git a/acceptance/bundle/resources/grants/registered_models/databricks.yml.tmpl b/acceptance/bundle/resources/grants/registered_models/databricks.yml.tmpl index f94bf8fd69..f0344c4980 100644 --- a/acceptance/bundle/resources/grants/registered_models/databricks.yml.tmpl +++ b/acceptance/bundle/resources/grants/registered_models/databricks.yml.tmpl @@ -11,9 +11,8 @@ resources: name: mymodel comment: mycomment catalog_name: main - # this does not work because we don't create implicit dependency like we do with volumes: - #schema_name: myschema_$UNIQUE_NAME - schema_name: ${resources.schemas.my_schema.name} + # implicit dependency detection resolves this to ${resources.schemas.my_schema.name} + schema_name: myschema_$UNIQUE_NAME grants: - principal: 
deco-test-user@databricks.com privileges: ["APPLY_TAG"] diff --git a/acceptance/bundle/resources/volumes/catalog-var-ref/output.txt b/acceptance/bundle/resources/volumes/catalog-var-ref/output.txt index 56e088656d..e16db7ed26 100644 --- a/acceptance/bundle/resources/volumes/catalog-var-ref/output.txt +++ b/acceptance/bundle/resources/volumes/catalog-var-ref/output.txt @@ -28,6 +28,16 @@ } }, "resources.volumes.metadata": { + "depends_on": [ + { + "node": "resources.catalogs.main_catalog", + "label": "${resources.catalogs.main_catalog.name}" + }, + { + "node": "resources.schemas.raw_schema", + "label": "${resources.schemas.raw_schema.name}" + } + ], "action": "create", "new_state": { "value": { diff --git a/acceptance/bundle/resources/volumes/catalog-var-ref/test.toml b/acceptance/bundle/resources/volumes/catalog-var-ref/test.toml index 63b6228ab7..613ff598f6 100644 --- a/acceptance/bundle/resources/volumes/catalog-var-ref/test.toml +++ b/acceptance/bundle/resources/volumes/catalog-var-ref/test.toml @@ -1,5 +1,3 @@ -Badness = "No depends_on for volume resource" - Local = true Cloud = false RecordRequests = false diff --git a/bundle/config/mutator/resourcemutator/capture_schema_dependency.go b/bundle/config/mutator/resourcemutator/capture_schema_dependency.go deleted file mode 100644 index ef1581393a..0000000000 --- a/bundle/config/mutator/resourcemutator/capture_schema_dependency.go +++ /dev/null @@ -1,135 +0,0 @@ -package resourcemutator - -import ( - "context" - "fmt" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/cli/libs/diag" -) - -type captureSchemaDependency struct{} - -// If a user defines a UC schema in the bundle, they can refer to it in DLT pipelines -// or UC Volumes using the `${resources.schemas..name}` syntax. 
Using this -// syntax allows TF to capture the deploy time dependency this DLT pipeline or UC Volume -// has on the schema and deploy changes to the schema before deploying the pipeline or volume. -// -// Similarly, if a user defines a UC catalog in the bundle, they can refer to it in UC schemas -// using the `${resources.catalogs..name}` syntax. This captures the deploy time -// dependency the schema has on the catalog. -// -// This mutator translates any implicit catalog or schema references to the explicit syntax. -func CaptureSchemaDependency() bundle.Mutator { - return &captureSchemaDependency{} -} - -func (m *captureSchemaDependency) Name() string { - return "CaptureSchemaDependency" -} - -func schemaNameRef(key string) string { - return fmt.Sprintf("${resources.schemas.%s.name}", key) -} - -func catalogNameRef(key string) string { - return fmt.Sprintf("${resources.catalogs.%s.name}", key) -} - -func findSchema(b *bundle.Bundle, catalogName, schemaName string) (string, *resources.Schema) { - if catalogName == "" || schemaName == "" { - return "", nil - } - - for k, s := range b.Config.Resources.Schemas { - if s != nil && s.CatalogName == catalogName && s.Name == schemaName { - return k, s - } - } - return "", nil -} - -func resolveVolume(v *resources.Volume, b *bundle.Bundle) { - if v == nil { - return - } - schemaK, schema := findSchema(b, v.CatalogName, v.SchemaName) - if schema == nil { - return - } - - v.SchemaName = schemaNameRef(schemaK) -} - -func resolvePipelineSchema(p *resources.Pipeline, b *bundle.Bundle) { - if p == nil { - return - } - if p.Schema == "" { - return - } - schemaK, schema := findSchema(b, p.Catalog, p.Schema) - if schema == nil { - return - } - - p.Schema = schemaNameRef(schemaK) -} - -func resolvePipelineTarget(p *resources.Pipeline, b *bundle.Bundle) { - if p == nil { - return - } - if p.Target == "" { - return - } - schemaK, schema := findSchema(b, p.Catalog, p.Target) - if schema == nil { - return - } - p.Target = 
schemaNameRef(schemaK) -} - -func findCatalog(b *bundle.Bundle, catalogName string) (string, *resources.Catalog) { - if catalogName == "" { - return "", nil - } - - for k, c := range b.Config.Resources.Catalogs { - if c != nil && c.Name == catalogName { - return k, c - } - } - return "", nil -} - -func resolveSchema(s *resources.Schema, b *bundle.Bundle) { - if s == nil { - return - } - catalogK, catalog := findCatalog(b, s.CatalogName) - if catalog == nil { - return - } - - s.CatalogName = catalogNameRef(catalogK) -} - -func (m *captureSchemaDependency) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - for _, s := range b.Config.Resources.Schemas { - resolveSchema(s, b) - } - for _, p := range b.Config.Resources.Pipelines { - // "schema" and "target" have the same semantics in the DLT API but are mutually - // exclusive i.e. only one can be set at a time. If schema is set, the pipeline - // is in direct publishing mode and can write tables to multiple schemas - // (vs target which is limited to a single schema). 
- resolvePipelineTarget(p, b) - resolvePipelineSchema(p, b) - } - for _, v := range b.Config.Resources.Volumes { - resolveVolume(v, b) - } - return nil -} diff --git a/bundle/config/mutator/resourcemutator/capture_schema_dependency_test.go b/bundle/config/mutator/resourcemutator/capture_schema_dependency_test.go deleted file mode 100644 index 3993f45041..0000000000 --- a/bundle/config/mutator/resourcemutator/capture_schema_dependency_test.go +++ /dev/null @@ -1,337 +0,0 @@ -package resourcemutator - -import ( - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/databricks-sdk-go/service/catalog" - "github.com/databricks/databricks-sdk-go/service/pipelines" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestCaptureSchemaDependencyForVolume(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Resources: config.Resources{ - Schemas: map[string]*resources.Schema{ - "schema1": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog1", - Name: "foobar", - }, - }, - "schema2": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog2", - Name: "foobar", - }, - }, - "schema3": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog1", - Name: "barfoo", - }, - }, - "nilschema": nil, - "emptyschema": {}, - }, - Volumes: map[string]*resources.Volume{ - "volume1": { - CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ - CatalogName: "catalog1", - SchemaName: "foobar", - }, - }, - "volume2": { - CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ - CatalogName: "catalog2", - SchemaName: "foobar", - }, - }, - "volume3": { - CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ - CatalogName: "catalog1", - SchemaName: "barfoo", - }, - }, - "volume4": { - CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ - CatalogName: "catalogX", - 
SchemaName: "foobar", - }, - }, - "volume5": { - CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ - CatalogName: "catalog1", - SchemaName: "schemaX", - }, - }, - "nilVolume": nil, - "emptyVolume": {}, - }, - }, - }, - } - - d := bundle.Apply(t.Context(), b, CaptureSchemaDependency()) - require.Nil(t, d) - - assert.Equal(t, "${resources.schemas.schema1.name}", b.Config.Resources.Volumes["volume1"].SchemaName) - assert.Equal(t, "${resources.schemas.schema2.name}", b.Config.Resources.Volumes["volume2"].SchemaName) - assert.Equal(t, "${resources.schemas.schema3.name}", b.Config.Resources.Volumes["volume3"].SchemaName) - assert.Equal(t, "foobar", b.Config.Resources.Volumes["volume4"].SchemaName) - assert.Equal(t, "schemaX", b.Config.Resources.Volumes["volume5"].SchemaName) - - assert.Nil(t, b.Config.Resources.Volumes["nilVolume"]) - // assert.Nil(t, b.Config.Resources.Volumes["emptyVolume"].CreateVolumeRequestContent) -} - -func TestCaptureSchemaDependencyForPipelinesWithTarget(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Resources: config.Resources{ - Schemas: map[string]*resources.Schema{ - "schema1": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog1", - Name: "foobar", - }, - }, - "schema2": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog2", - Name: "foobar", - }, - }, - "schema3": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog1", - Name: "barfoo", - }, - }, - "nilschema": nil, - "emptyschema": {}, - }, - Pipelines: map[string]*resources.Pipeline{ - "pipeline1": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog1", - Schema: "foobar", - }, - }, - "pipeline2": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog2", - Schema: "foobar", - }, - }, - "pipeline3": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog1", - Schema: "barfoo", - }, - }, - "pipeline4": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalogX", - Schema: "foobar", - 
}, - }, - "pipeline5": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog1", - Schema: "schemaX", - }, - }, - "pipeline6": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "", - Schema: "foobar", - }, - }, - "pipeline7": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "", - Schema: "", - Name: "whatever", - }, - }, - "nilPipeline": nil, - "emptyPipeline": {}, - }, - }, - }, - } - - d := bundle.Apply(t.Context(), b, CaptureSchemaDependency()) - require.Nil(t, d) - - assert.Equal(t, "${resources.schemas.schema1.name}", b.Config.Resources.Pipelines["pipeline1"].Schema) - assert.Equal(t, "${resources.schemas.schema2.name}", b.Config.Resources.Pipelines["pipeline2"].Schema) - assert.Equal(t, "${resources.schemas.schema3.name}", b.Config.Resources.Pipelines["pipeline3"].Schema) - assert.Equal(t, "foobar", b.Config.Resources.Pipelines["pipeline4"].Schema) - assert.Equal(t, "schemaX", b.Config.Resources.Pipelines["pipeline5"].Schema) - assert.Equal(t, "foobar", b.Config.Resources.Pipelines["pipeline6"].Schema) - assert.Equal(t, "", b.Config.Resources.Pipelines["pipeline7"].Schema) - - assert.Nil(t, b.Config.Resources.Pipelines["nilPipeline"]) - assert.Empty(t, b.Config.Resources.Pipelines["emptyPipeline"].Catalog) - - for _, k := range []string{"pipeline1", "pipeline2", "pipeline3", "pipeline4", "pipeline5", "pipeline6", "pipeline7"} { - assert.Empty(t, b.Config.Resources.Pipelines[k].Target) - } -} - -func TestCaptureSchemaDependencyForPipelinesWithSchema(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Resources: config.Resources{ - Schemas: map[string]*resources.Schema{ - "schema1": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog1", - Name: "foobar", - }, - }, - "schema2": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog2", - Name: "foobar", - }, - }, - "schema3": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog1", - Name: "barfoo", - }, - }, - "nilschema": nil, - "emptyschema": 
{}, - }, - Pipelines: map[string]*resources.Pipeline{ - "pipeline1": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog1", - Target: "foobar", - }, - }, - "pipeline2": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog2", - Target: "foobar", - }, - }, - "pipeline3": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog1", - Target: "barfoo", - }, - }, - "pipeline4": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalogX", - Target: "foobar", - }, - }, - "pipeline5": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "catalog1", - Target: "schemaX", - }, - }, - "pipeline6": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "", - Target: "foobar", - }, - }, - "pipeline7": { - CreatePipeline: pipelines.CreatePipeline{ - Catalog: "", - Target: "", - Name: "whatever", - }, - }, - }, - }, - }, - } - - d := bundle.Apply(t.Context(), b, CaptureSchemaDependency()) - require.Nil(t, d) - assert.Equal(t, "${resources.schemas.schema1.name}", b.Config.Resources.Pipelines["pipeline1"].Target) - assert.Equal(t, "${resources.schemas.schema2.name}", b.Config.Resources.Pipelines["pipeline2"].Target) - assert.Equal(t, "${resources.schemas.schema3.name}", b.Config.Resources.Pipelines["pipeline3"].Target) - assert.Equal(t, "foobar", b.Config.Resources.Pipelines["pipeline4"].Target) - assert.Equal(t, "schemaX", b.Config.Resources.Pipelines["pipeline5"].Target) - assert.Equal(t, "foobar", b.Config.Resources.Pipelines["pipeline6"].Target) - assert.Equal(t, "", b.Config.Resources.Pipelines["pipeline7"].Target) - - for _, k := range []string{"pipeline1", "pipeline2", "pipeline3", "pipeline4", "pipeline5", "pipeline6", "pipeline7"} { - assert.Empty(t, b.Config.Resources.Pipelines[k].Schema) - } -} - -func TestCaptureCatalogDependencyForSchema(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Resources: config.Resources{ - Catalogs: map[string]*resources.Catalog{ - "catalog1": { - CreateCatalog: 
catalog.CreateCatalog{ - Name: "catalog1", - }, - }, - "catalog2": { - CreateCatalog: catalog.CreateCatalog{ - Name: "catalog2", - }, - }, - "nilcatalog": nil, - "emptycatalog": {}, - }, - Schemas: map[string]*resources.Schema{ - "schema1": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog1", - Name: "schema1", - }, - }, - "schema2": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalog2", - Name: "schema2", - }, - }, - "schema3": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "catalogX", - Name: "schema3", - }, - }, - "schema4": { - CreateSchema: catalog.CreateSchema{ - CatalogName: "", - Name: "schema4", - }, - }, - "nilschema": nil, - "emptyschema": {}, - }, - }, - }, - } - - d := bundle.Apply(t.Context(), b, CaptureSchemaDependency()) - require.Nil(t, d) - - assert.Equal(t, "${resources.catalogs.catalog1.name}", b.Config.Resources.Schemas["schema1"].CatalogName) - assert.Equal(t, "${resources.catalogs.catalog2.name}", b.Config.Resources.Schemas["schema2"].CatalogName) - assert.Equal(t, "catalogX", b.Config.Resources.Schemas["schema3"].CatalogName) - assert.Equal(t, "", b.Config.Resources.Schemas["schema4"].CatalogName) - - assert.Nil(t, b.Config.Resources.Schemas["nilschema"]) -} diff --git a/bundle/config/mutator/resourcemutator/capture_uc_dependencies.go b/bundle/config/mutator/resourcemutator/capture_uc_dependencies.go new file mode 100644 index 0000000000..92d22333e7 --- /dev/null +++ b/bundle/config/mutator/resourcemutator/capture_uc_dependencies.go @@ -0,0 +1,161 @@ +package resourcemutator + +import ( + "context" + "fmt" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/diag" +) + +type captureUCDependencies struct{} + +// If a user defines a UC schema in the bundle, they can refer to it in DLT pipelines, +// UC Volumes, Registered Models, Quality Monitors, or Model Serving Endpoints using the +// `${resources.schemas..name}` syntax. 
Using this syntax allows TF to capture +// the deploy time dependency this resource has on the schema and deploy changes to the +// schema before deploying the dependent resource. +// +// Similarly, if a user defines a UC catalog in the bundle, they can refer to it in UC schemas, +// UC Volumes, Registered Models, or Model Serving Endpoints using the +// `${resources.catalogs..name}` syntax. This captures the deploy time +// dependency the resource has on the catalog. +// +// This mutator translates any implicit catalog or schema references to the explicit syntax. +func CaptureUCDependencies() bundle.Mutator { + return &captureUCDependencies{} +} + +func (m *captureUCDependencies) Name() string { + return "CaptureUCDependencies" +} + +func schemaNameRef(key string) string { + return fmt.Sprintf("${resources.schemas.%s.name}", key) +} + +func catalogNameRef(key string) string { + return fmt.Sprintf("${resources.catalogs.%s.name}", key) +} + +func findSchema(b *bundle.Bundle, catalogName, schemaName string) (string, *resources.Schema) { + if catalogName == "" || schemaName == "" { + return "", nil + } + + for k, s := range b.Config.Resources.Schemas { + if s != nil && s.CatalogName == catalogName && s.Name == schemaName { + return k, s + } + } + return "", nil +} + +func findCatalog(b *bundle.Bundle, catalogName string) (string, *resources.Catalog) { + if catalogName == "" { + return "", nil + } + + for k, c := range b.Config.Resources.Catalogs { + if c != nil && c.Name == catalogName { + return k, c + } + } + return "", nil +} + +// resolveSchema returns the explicit schema reference if the given catalogName +// and schemaName match a schema defined in the bundle. Otherwise returns schemaName +// unchanged. Must be called before resolveCatalog on the same resource since +// findSchema needs the original (unmutated) catalogName. 
+func resolveSchema(b *bundle.Bundle, catalogName, schemaName string) string { + k, s := findSchema(b, catalogName, schemaName) + if s != nil { + return schemaNameRef(k) + } + return schemaName +} + +// resolveCatalog returns the explicit catalog reference if catalogName matches +// a catalog defined in the bundle. Otherwise returns catalogName unchanged. +func resolveCatalog(b *bundle.Bundle, catalogName string) string { + k, c := findCatalog(b, catalogName) + if c != nil { + return catalogNameRef(k) + } + return catalogName +} + +func (m *captureUCDependencies) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + // Resolve resources that depend on schemas before resolving schemas themselves. + // The schema resolution below modifies schema.CatalogName, and findSchema + // (used by resolveSchema) matches against the original schema.CatalogName value. + for _, v := range b.Config.Resources.Volumes { + if v == nil { + continue + } + v.SchemaName = resolveSchema(b, v.CatalogName, v.SchemaName) + v.CatalogName = resolveCatalog(b, v.CatalogName) + } + for _, rm := range b.Config.Resources.RegisteredModels { + if rm == nil { + continue + } + rm.SchemaName = resolveSchema(b, rm.CatalogName, rm.SchemaName) + rm.CatalogName = resolveCatalog(b, rm.CatalogName) + } + for _, p := range b.Config.Resources.Pipelines { + if p == nil { + continue + } + // "schema" and "target" have the same semantics in the DLT API but are mutually + // exclusive i.e. only one can be set at a time. + p.Schema = resolveSchema(b, p.Catalog, p.Schema) + p.Target = resolveSchema(b, p.Catalog, p.Target) + p.Catalog = resolveCatalog(b, p.Catalog) + } + for _, qm := range b.Config.Resources.QualityMonitors { + if qm == nil || qm.OutputSchemaName == "" { + continue + } + // OutputSchemaName is a compound "catalog.schema" string. 
+ parts := strings.SplitN(qm.OutputSchemaName, ".", 2) + if len(parts) != 2 { + continue + } + catalogName, schemaName := parts[0], parts[1] + resolved := resolveCatalog(b, catalogName) + "." + resolveSchema(b, catalogName, schemaName) + if resolved != qm.OutputSchemaName { + qm.OutputSchemaName = resolved + } + } + for _, mse := range b.Config.Resources.ModelServingEndpoints { + if mse == nil { + continue + } + if mse.AiGateway != nil && mse.AiGateway.InferenceTableConfig != nil { + itc := mse.AiGateway.InferenceTableConfig + itc.SchemaName = resolveSchema(b, itc.CatalogName, itc.SchemaName) + itc.CatalogName = resolveCatalog(b, itc.CatalogName) + } + // AutoCaptureConfig is deprecated but still in use. + if mse.Config != nil && mse.Config.AutoCaptureConfig != nil { + acc := mse.Config.AutoCaptureConfig + acc.SchemaName = resolveSchema(b, acc.CatalogName, acc.SchemaName) + acc.CatalogName = resolveCatalog(b, acc.CatalogName) + } + } + + // Schemas are resolved last because the schema catalog resolution modifies + // schema.CatalogName, and findSchema (used by resolveSchema above) matches + // against the original schema.CatalogName value. 
+ for _, s := range b.Config.Resources.Schemas { + if s == nil { + continue + } + s.CatalogName = resolveCatalog(b, s.CatalogName) + } + return nil +} diff --git a/bundle/config/mutator/resourcemutator/capture_uc_dependencies_test.go b/bundle/config/mutator/resourcemutator/capture_uc_dependencies_test.go new file mode 100644 index 0000000000..12aac0c472 --- /dev/null +++ b/bundle/config/mutator/resourcemutator/capture_uc_dependencies_test.go @@ -0,0 +1,299 @@ +package resourcemutator + +import ( + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/databricks-sdk-go/service/pipelines" + "github.com/databricks/databricks-sdk-go/service/serving" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// Shared bundle with schemas for resolveSchema tests. +func bundleWithSchemas() *bundle.Bundle { + return &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Schemas: map[string]*resources.Schema{ + "schema1": {CreateSchema: catalog.CreateSchema{CatalogName: "catalog1", Name: "foobar"}}, + "schema2": {CreateSchema: catalog.CreateSchema{CatalogName: "catalog2", Name: "foobar"}}, + "schema3": {CreateSchema: catalog.CreateSchema{CatalogName: "catalog1", Name: "barfoo"}}, + }, + }, + }, + } +} + +// Shared bundle with catalogs for resolveCatalog tests. 
+func bundleWithCatalogs() *bundle.Bundle { + return &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Catalogs: map[string]*resources.Catalog{ + "dev_catalog": {CreateCatalog: catalog.CreateCatalog{Name: "catalog1"}}, + "prod_catalog": {CreateCatalog: catalog.CreateCatalog{Name: "catalog2"}}, + }, + }, + }, + } +} + +func TestResolveSchema(t *testing.T) { + b := bundleWithSchemas() + + tests := []struct { + name string + catalogName string + schemaName string + expected string + }{ + {"match_catalog1_foobar", "catalog1", "foobar", "${resources.schemas.schema1.name}"}, + {"match_catalog2_foobar", "catalog2", "foobar", "${resources.schemas.schema2.name}"}, + {"match_catalog1_barfoo", "catalog1", "barfoo", "${resources.schemas.schema3.name}"}, + {"no_match_wrong_catalog", "catalogX", "foobar", "foobar"}, + {"no_match_wrong_schema", "catalog1", "schemaX", "schemaX"}, + {"empty_catalog", "", "foobar", "foobar"}, + {"empty_schema", "catalog1", "", ""}, + {"both_empty", "", "", ""}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.expected, resolveSchema(b, tc.catalogName, tc.schemaName)) + }) + } +} + +func TestResolveCatalog(t *testing.T) { + b := bundleWithCatalogs() + + tests := []struct { + name string + catalogName string + expected string + }{ + {"match_catalog1", "catalog1", "${resources.catalogs.dev_catalog.name}"}, + {"match_catalog2", "catalog2", "${resources.catalogs.prod_catalog.name}"}, + {"no_match", "catalogX", "catalogX"}, + {"empty", "", ""}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.expected, resolveCatalog(b, tc.catalogName)) + }) + } +} + +// Test that all resource types are wired correctly by defining a catalog, schema, +// and one of each resource type in a single bundle. Also verifies the ordering fix: +// schemas must be resolved last since their CatalogName gets mutated. 
+func TestCaptureUCDependencies(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Catalogs: map[string]*resources.Catalog{ + "my_catalog": {CreateCatalog: catalog.CreateCatalog{Name: "mycatalog"}}, + }, + Schemas: map[string]*resources.Schema{ + "my_schema": {CreateSchema: catalog.CreateSchema{CatalogName: "mycatalog", Name: "myschema"}}, + }, + Volumes: map[string]*resources.Volume{ + "my_volume": {CreateVolumeRequestContent: catalog.CreateVolumeRequestContent{ + CatalogName: "mycatalog", SchemaName: "myschema", + }}, + }, + RegisteredModels: map[string]*resources.RegisteredModel{ + "my_model": {CreateRegisteredModelRequest: catalog.CreateRegisteredModelRequest{ + CatalogName: "mycatalog", SchemaName: "myschema", + }}, + }, + Pipelines: map[string]*resources.Pipeline{ + "my_pipeline": {CreatePipeline: pipelines.CreatePipeline{ + Catalog: "mycatalog", Schema: "myschema", + }}, + }, + QualityMonitors: map[string]*resources.QualityMonitor{ + "my_monitor": {CreateMonitor: catalog.CreateMonitor{ + OutputSchemaName: "mycatalog.myschema", + }}, + }, + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + "my_endpoint": {CreateServingEndpoint: serving.CreateServingEndpoint{ + AiGateway: &serving.AiGatewayConfig{ + InferenceTableConfig: &serving.AiGatewayInferenceTableConfig{ + CatalogName: "mycatalog", SchemaName: "myschema", + }, + }, + }}, + }, + }, + }, + } + + d := bundle.Apply(t.Context(), b, CaptureUCDependencies()) + require.Nil(t, d) + + schemaRef := "${resources.schemas.my_schema.name}" + catalogRef := "${resources.catalogs.my_catalog.name}" + + // Schema catalog dependency. + assert.Equal(t, catalogRef, b.Config.Resources.Schemas["my_schema"].CatalogName) + + // Volume. + assert.Equal(t, schemaRef, b.Config.Resources.Volumes["my_volume"].SchemaName) + assert.Equal(t, catalogRef, b.Config.Resources.Volumes["my_volume"].CatalogName) + + // Registered model. 
+ assert.Equal(t, schemaRef, b.Config.Resources.RegisteredModels["my_model"].SchemaName) + assert.Equal(t, catalogRef, b.Config.Resources.RegisteredModels["my_model"].CatalogName) + + // Pipeline. + assert.Equal(t, schemaRef, b.Config.Resources.Pipelines["my_pipeline"].Schema) + assert.Equal(t, catalogRef, b.Config.Resources.Pipelines["my_pipeline"].Catalog) + + // Quality monitor (compound "catalog.schema" field). + assert.Equal(t, catalogRef+"."+schemaRef, b.Config.Resources.QualityMonitors["my_monitor"].OutputSchemaName) + + // Model serving endpoint. + itc := b.Config.Resources.ModelServingEndpoints["my_endpoint"].AiGateway.InferenceTableConfig + assert.Equal(t, schemaRef, itc.SchemaName) + assert.Equal(t, catalogRef, itc.CatalogName) +} + +// Pipeline schema and target are mutually exclusive; only the populated field +// should be resolved. +func TestCaptureUCDependenciesPipelineSchemaTarget(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Schemas: map[string]*resources.Schema{ + "s": {CreateSchema: catalog.CreateSchema{CatalogName: "c", Name: "n"}}, + }, + Pipelines: map[string]*resources.Pipeline{ + "with_schema": {CreatePipeline: pipelines.CreatePipeline{Catalog: "c", Schema: "n"}}, + "with_target": {CreatePipeline: pipelines.CreatePipeline{Catalog: "c", Target: "n"}}, + }, + }, + }, + } + + d := bundle.Apply(t.Context(), b, CaptureUCDependencies()) + require.Nil(t, d) + + ref := "${resources.schemas.s.name}" + + assert.Equal(t, ref, b.Config.Resources.Pipelines["with_schema"].Schema) + assert.Empty(t, b.Config.Resources.Pipelines["with_schema"].Target) + + assert.Equal(t, ref, b.Config.Resources.Pipelines["with_target"].Target) + assert.Empty(t, b.Config.Resources.Pipelines["with_target"].Schema) +} + +func TestCaptureUCDependenciesQualityMonitorEdgeCases(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Catalogs: map[string]*resources.Catalog{ + "my_catalog": 
{CreateCatalog: catalog.CreateCatalog{Name: "mycatalog"}}, + }, + Schemas: map[string]*resources.Schema{ + "my_schema": {CreateSchema: catalog.CreateSchema{CatalogName: "mycatalog", Name: "myschema"}}, + }, + QualityMonitors: map[string]*resources.QualityMonitor{ + "catalog_only": {CreateMonitor: catalog.CreateMonitor{OutputSchemaName: "mycatalog.other"}}, + "no_match": {CreateMonitor: catalog.CreateMonitor{OutputSchemaName: "other.other"}}, + "empty": {CreateMonitor: catalog.CreateMonitor{OutputSchemaName: ""}}, + "no_dot": {CreateMonitor: catalog.CreateMonitor{OutputSchemaName: "nodot"}}, + "nil_monitor": nil, + }, + }, + }, + } + + d := bundle.Apply(t.Context(), b, CaptureUCDependencies()) + require.Nil(t, d) + + assert.Equal(t, "${resources.catalogs.my_catalog.name}.other", b.Config.Resources.QualityMonitors["catalog_only"].OutputSchemaName) + assert.Equal(t, "other.other", b.Config.Resources.QualityMonitors["no_match"].OutputSchemaName) + assert.Equal(t, "", b.Config.Resources.QualityMonitors["empty"].OutputSchemaName) + assert.Equal(t, "nodot", b.Config.Resources.QualityMonitors["no_dot"].OutputSchemaName) + assert.Nil(t, b.Config.Resources.QualityMonitors["nil_monitor"]) +} + +func TestCaptureUCDependenciesModelServingEndpointEdgeCases(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Catalogs: map[string]*resources.Catalog{ + "my_catalog": {CreateCatalog: catalog.CreateCatalog{Name: "mycatalog"}}, + }, + Schemas: map[string]*resources.Schema{ + "my_schema": {CreateSchema: catalog.CreateSchema{CatalogName: "mycatalog", Name: "myschema"}}, + }, + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + // AutoCaptureConfig path. + "auto_capture": {CreateServingEndpoint: serving.CreateServingEndpoint{ + Config: &serving.EndpointCoreConfigInput{ + AutoCaptureConfig: &serving.AutoCaptureConfigInput{ + CatalogName: "mycatalog", SchemaName: "myschema", + }, + }, + }}, + // No match. 
+ "no_match": {CreateServingEndpoint: serving.CreateServingEndpoint{ + AiGateway: &serving.AiGatewayConfig{ + InferenceTableConfig: &serving.AiGatewayInferenceTableConfig{ + CatalogName: "other", SchemaName: "other", + }, + }, + }}, + // Various nil nesting levels. + "nil_gateway": {CreateServingEndpoint: serving.CreateServingEndpoint{}}, + "nil_inference_table": {CreateServingEndpoint: serving.CreateServingEndpoint{AiGateway: &serving.AiGatewayConfig{}}}, + "nil_endpoint": nil, + }, + }, + }, + } + + d := bundle.Apply(t.Context(), b, CaptureUCDependencies()) + require.Nil(t, d) + + schemaRef := "${resources.schemas.my_schema.name}" + catalogRef := "${resources.catalogs.my_catalog.name}" + + acc := b.Config.Resources.ModelServingEndpoints["auto_capture"].Config.AutoCaptureConfig + assert.Equal(t, schemaRef, acc.SchemaName) + assert.Equal(t, catalogRef, acc.CatalogName) + + itc := b.Config.Resources.ModelServingEndpoints["no_match"].AiGateway.InferenceTableConfig + assert.Equal(t, "other", itc.CatalogName) + assert.Equal(t, "other", itc.SchemaName) + + assert.Nil(t, b.Config.Resources.ModelServingEndpoints["nil_endpoint"]) +} + +// Nil and empty resources should not panic. 
+func TestCaptureUCDependenciesNilResources(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Schemas: map[string]*resources.Schema{"nil": nil, "empty": {}}, + Catalogs: map[string]*resources.Catalog{"nil": nil, "empty": {}}, + Volumes: map[string]*resources.Volume{"nil": nil, "empty": {}}, + RegisteredModels: map[string]*resources.RegisteredModel{"nil": nil, "empty": {}}, + Pipelines: map[string]*resources.Pipeline{"nil": nil, "empty": {}}, + QualityMonitors: map[string]*resources.QualityMonitor{"nil": nil, "empty": {}}, + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{"nil": nil, "empty": {}}, + }, + }, + } + + d := bundle.Apply(t.Context(), b, CaptureUCDependencies()) + require.Nil(t, d) +} diff --git a/bundle/config/mutator/resourcemutator/resource_mutator.go b/bundle/config/mutator/resourcemutator/resource_mutator.go index 87069d6f84..9616de202a 100644 --- a/bundle/config/mutator/resourcemutator/resource_mutator.go +++ b/bundle/config/mutator/resourcemutator/resource_mutator.go @@ -171,10 +171,11 @@ func applyNormalizeMutators(ctx context.Context, b *bundle.Bundle) { // Updates (dynamic): same paths — merges grant entries by principal and deduplicates privileges MergeGrants(), - // Reads (typed): resources.pipelines.*.{catalog,schema,target}, resources.volumes.*.{catalog_name,schema_name} (checks for schema references) - // Updates (typed): resources.pipelines.*.{schema,target}, resources.volumes.*.schema_name (converts implicit schema references to explicit ${resources.schemas..name} syntax) - // Translates implicit schema references in DLT pipelines or UC Volumes to explicit syntax to capture dependencies - CaptureSchemaDependency(), + // Reads (typed): resources.{volumes,registered_models,pipelines,quality_monitors,model_serving_endpoints}.*.{catalog_name,schema_name,...} + // Updates (typed): same paths — converts implicit schema/catalog references to explicit 
${resources.schemas.&lt;key&gt;.name} / ${resources.catalogs.&lt;key&gt;.name} syntax + // Also updates: resources.schemas.*.catalog_name (catalog dependency for schemas) + // Translates implicit schema and catalog references across all UC resources to explicit syntax to capture dependencies + CaptureUCDependencies(), + + // Reads (dynamic): resources.dashboards.*.file_path + // Updates (dynamic): resources.dashboards.*.serialized_dashboard